From 6b624abd8c48b37e08af6dcc4f724480fd591868 Mon Sep 17 00:00:00 2001
From: JeshuaRen <270813223@qq.com>
Date: Thu, 11 Apr 2024 14:33:02 +0800
Subject: [PATCH] 'Added code comments'
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.idea/.gitignore | 8 +
.idea/modules.xml | 8 +
.idea/storage.iml | 9 +
.idea/vcs.xml | 6 +
agent/internal/grpc/io.go | 32 +-
agent/internal/grpc/ping.go | 9 +
agent/internal/grpc/service.go | 38 +-
agent/internal/mq/agent.go | 12 +
agent/internal/mq/cache.go | 54 +-
agent/internal/mq/io.go | 12 +
agent/internal/mq/object.go | 9 +
agent/internal/mq/service.go | 12 +-
agent/internal/mq/storage.go | 121 +-
agent/internal/task/cache_move_package.go | 20 +-
agent/internal/task/create_package.go | 25 +
agent/internal/task/execute_io_plan.go | 35 +-
agent/internal/task/ipfs_pin.go | 21 +-
agent/internal/task/ipfs_read.go | 26 +-
agent/internal/task/storage_load_package.go | 122 +-
agent/internal/task/task.go | 17 +-
agent/main.go | 34 +-
client/internal/cmdline/bucket.go | 23 +
client/internal/cmdline/cache.go | 16 +
client/internal/cmdline/commandline.go | 19 +-
client/internal/cmdline/distlock.go | 16 +
client/internal/cmdline/object.go | 21 +
client/internal/cmdline/package.go | 65 +-
client/internal/cmdline/scanner.go | 19 +
client/internal/cmdline/serve.go | 8 +
client/internal/cmdline/storage.go | 21 +
client/internal/config/config.go | 2 +
client/internal/http/bucket.go | 16 +
client/internal/http/cache.go | 13 +
client/internal/http/node.go | 16 +
client/internal/http/object.go | 35 +-
client/internal/http/package.go | 17 +
client/internal/http/server.go | 52 +-
client/internal/http/storage.go | 11 +
client/internal/services/agent.go | 13 +-
client/internal/services/bucket.go | 40 +-
client/internal/services/cache.go | 22 +
client/internal/services/node.go | 17 +-
client/internal/services/object.go | 28 +-
client/internal/services/package.go | 22 +
client/internal/services/scanner.go | 12 +-
client/internal/services/service.go | 22 +-
client/internal/services/storage.go | 61 +-
client/internal/task/task.go | 16 +-
client/internal/task/upload_objects.go | 34 +-
client/main.go | 37 +-
common/globals/globals.go | 4 +
common/globals/pools.go | 8 +
common/pkgs/cmd/download_package.go | 40 +-
common/pkgs/cmd/upload_objects.go | 81 +-
common/pkgs/db/object.go | 22 +-
common/pkgs/mq/coordinator/package.go | 2 +
coordinator/internal/mq/package.go | 87 +-
coordinator/main.go | 13 +-
go.mod | 47 +-
go.sum | 87 +-
log/agent.log | 870 ++++++
log/coordinator.log | 17 +
log/scanner.log | 2659 +++++++++++++++++
scanner/internal/event/agent_cache_gc.go | 17 +-
scanner/internal/event/agent_check_cache.go | 17 +-
scanner/internal/event/agent_check_state.go | 21 +-
scanner/internal/event/agent_check_storage.go | 16 +-
scanner/internal/event/agent_storage_gc.go | 19 +-
scanner/main.go | 34 +-
69 files changed, 5103 insertions(+), 282 deletions(-)
create mode 100644 .idea/.gitignore
create mode 100644 .idea/modules.xml
create mode 100644 .idea/storage.iml
create mode 100644 .idea/vcs.xml
create mode 100644 log/agent.log
create mode 100644 log/coordinator.log
create mode 100644 log/scanner.log
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..13566b8
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..c2b91a4
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/storage.iml b/.idea/storage.iml
new file mode 100644
index 0000000..5e764c4
--- /dev/null
+++ b/.idea/storage.iml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..35eb1dd
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/agent/internal/grpc/io.go b/agent/internal/grpc/io.go
index 8d016a7..8ee17a3 100644
--- a/agent/internal/grpc/io.go
+++ b/agent/internal/grpc/io.go
@@ -10,11 +10,17 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch"
)
+// SendStream receives file data streamed from the client.
+//
+// server: the server-side stream interface used to receive and respond to client requests.
+// Returns: an error, or nil if the stream was handled successfully.
func (s *Service) SendStream(server agentserver.Agent_SendStreamServer) error {
+ // Receive the initial packet of the stream transfer
msg, err := server.Recv()
if err != nil {
return fmt.Errorf("recving stream id packet: %w", err)
}
+ // Validate the type of the initial packet
if msg.Type != agentserver.StreamDataPacketType_SendArgs {
return fmt.Errorf("first packet must be a SendArgs packet")
}
@@ -26,26 +32,25 @@ func (s *Service) SendStream(server agentserver.Agent_SendStreamServer) error {
pr, pw := io.Pipe()
+ // Notify the system that the stream is ready
s.sw.StreamReady(ioswitch.PlanID(msg.PlanID), ioswitch.NewStream(ioswitch.StreamID(msg.StreamID), pr))
- // Then read the file data
+ // Receive file data from the client in a loop
var recvSize int64
for {
msg, err := server.Recv()
- // Failed to read data from the client.
- // Even if err is io.EOF, the receive is considered failed as long as the connection was dropped before an EOF packet arrived from the client.
+ // Handle receive errors
if err != nil {
- // Close the file writer; the returned hash and error are not needed
pw.CloseWithError(io.ErrClosedPipe)
logger.WithField("ReceiveSize", recvSize).
Warnf("recv message failed, err: %s", err.Error())
return fmt.Errorf("recv message failed, err: %w", err)
}
+ // Write the received data into the pipe
err = myio.WriteAll(pw, msg.Data)
if err != nil {
- // Close the file writer; the returned hash and error are not needed
pw.CloseWithError(io.ErrClosedPipe)
logger.Warnf("write data to file failed, err: %s", err.Error())
return fmt.Errorf("write data to file failed, err: %w", err)
@@ -53,15 +58,15 @@ func (s *Service) SendStream(server agentserver.Agent_SendStreamServer) error {
recvSize += int64(len(msg.Data))
+ // When an EOF packet is received, finish writing and return
if msg.Type == agentserver.StreamDataPacketType_EOF {
- // The client explicitly signaled the end of the transfer, so finish writing and obtain the file hash
err := pw.Close()
if err != nil {
logger.Warnf("finish writing failed, err: %s", err.Error())
return fmt.Errorf("finish writing failed, err: %w", err)
}
- // And return the result to the client
+ // Send the transfer-complete response to the client
err = server.SendAndClose(&agentserver.SendStreamResp{})
if err != nil {
logger.Warnf("send response failed, err: %s", err.Error())
@@ -73,12 +78,18 @@ func (s *Service) SendStream(server agentserver.Agent_SendStreamServer) error {
}
}
+// FetchStream fetches stream data from the server side and sends it to the client.
+//
+// req: request carrying the plan ID and stream ID needed to fetch the stream.
+// server: server interface used to send stream data to the client.
+// Returns: any error encountered during processing.
func (s *Service) FetchStream(req *agentserver.FetchStreamReq, server agentserver.Agent_FetchStreamServer) error {
logger.
WithField("PlanID", req.PlanID).
WithField("StreamID", req.StreamID).
Debugf("send stream by grpc")
+ // Wait until the corresponding stream is ready
strs, err := s.sw.WaitStreams(ioswitch.PlanID(req.PlanID), ioswitch.StreamID(req.StreamID))
if err != nil {
logger.
@@ -91,6 +102,7 @@ func (s *Service) FetchStream(req *agentserver.FetchStreamReq, server agentserve
reader := strs[0].Stream
defer reader.Close()
+ // Read the stream data and send it to the client
buf := make([]byte, 4096)
readAllCnt := 0
for {
@@ -111,20 +123,20 @@ func (s *Service) FetchStream(req *agentserver.FetchStreamReq, server agentserve
}
}
- // File fully read
+ // Return once reading is finished or EOF is reached
if err == io.EOF {
logger.
WithField("PlanID", req.PlanID).
WithField("StreamID", req.StreamID).
Debugf("send data size %d", readAllCnt)
- // Send the EOF message
+ // Send the EOF message to notify the client that the transfer is complete
server.Send(&agentserver.StreamDataPacket{
Type: agentserver.StreamDataPacketType_EOF,
})
return nil
}
- // io.ErrUnexpectedEOF means EOF was hit before the buffer was filled; just send the remaining data as usual. Any error other than these two aborts the operation
+ // Handle read errors other than EOF and io.ErrUnexpectedEOF
if err != nil && err != io.ErrUnexpectedEOF {
logger.
WithField("PlanID", req.PlanID).
diff --git a/agent/internal/grpc/ping.go b/agent/internal/grpc/ping.go
index ff0fd20..b06ebcc 100644
--- a/agent/internal/grpc/ping.go
+++ b/agent/internal/grpc/ping.go
@@ -6,6 +6,15 @@ import (
agtrpc "gitlink.org.cn/cloudream/storage/common/pkgs/grpc/agent"
)
+// Ping is an RPC method used to verify that the service is available.
+//
+// Parameters:
+// context.Context: carries context information, including request metadata and cancellation signals.
+// *agtrpc.PingReq: the Ping request data, unused in the current implementation.
+//
+// Returns:
+// *agtrpc.PingResp: the Ping response; the current implementation always returns an empty response.
+// error: an error if processing fails, otherwise nil.
func (s *Service) Ping(context.Context, *agtrpc.PingReq) (*agtrpc.PingResp, error) {
return &agtrpc.PingResp{}, nil
}
diff --git a/agent/internal/grpc/service.go b/agent/internal/grpc/service.go
index 2e61eb3..669a20c 100644
--- a/agent/internal/grpc/service.go
+++ b/agent/internal/grpc/service.go
@@ -11,67 +11,63 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch"
)
+// Service defines the operations exposed by the agent gRPC service
type Service struct {
agentserver.AgentServer
sw *ioswitch.Switch
}
+// NewService creates and returns a new Service instance
func NewService(sw *ioswitch.Switch) *Service {
return &Service{
sw: sw,
}
}
+// SendIPFSFile handles a client request to upload a file to IPFS
func (s *Service) SendIPFSFile(server agentserver.Agent_SendIPFSFileServer) error {
log.Debugf("client upload file")
- ipfsCli, err := stgglb.IPFSPool.Acquire()
+ ipfsCli, err := stgglb.IPFSPool.Acquire() // Acquire an IPFS client instance
if err != nil {
log.Warnf("new ipfs client: %s", err.Error())
return fmt.Errorf("new ipfs client: %w", err)
}
defer ipfsCli.Close()
- writer, err := ipfsCli.CreateFileStream()
+ writer, err := ipfsCli.CreateFileStream() // Create a file stream on IPFS
if err != nil {
log.Warnf("create file failed, err: %s", err.Error())
return fmt.Errorf("create file failed, err: %w", err)
}
- // Then read the file data
var recvSize int64
for {
- msg, err := server.Recv()
+ msg, err := server.Recv() // Receive file data sent by the client
- // Failed to read data from the client.
- // Even if err is io.EOF, the receive is considered failed as long as the connection was dropped before an EOF packet arrived from the client.
if err != nil {
- // Close the file writer; the returned hash and error are not needed
- writer.Abort(io.ErrClosedPipe)
+ writer.Abort(io.ErrClosedPipe) // Close the file writer on error
log.WithField("ReceiveSize", recvSize).
Warnf("recv message failed, err: %s", err.Error())
return fmt.Errorf("recv message failed, err: %w", err)
}
- err = myio.WriteAll(writer, msg.Data)
+ err = myio.WriteAll(writer, msg.Data) // Write the data into the IPFS file stream
if err != nil {
- // Close the file writer; the returned hash and error are not needed
- writer.Abort(io.ErrClosedPipe)
+ writer.Abort(io.ErrClosedPipe) // Close the file writer when writing fails
log.Warnf("write data to file failed, err: %s", err.Error())
return fmt.Errorf("write data to file failed, err: %w", err)
}
recvSize += int64(len(msg.Data))
- if msg.Type == agentserver.StreamDataPacketType_EOF {
- // The client explicitly signaled the end of the transfer, so finish writing and obtain the file hash
+ if msg.Type == agentserver.StreamDataPacketType_EOF { // On EOF, finish writing and return the file hash
hash, err := writer.Finish()
if err != nil {
log.Warnf("finish writing failed, err: %s", err.Error())
return fmt.Errorf("finish writing failed, err: %w", err)
}
- // And return the result to the client
err = server.SendAndClose(&agentserver.SendIPFSFileResp{
FileHash: hash,
})
@@ -86,17 +82,18 @@ func (s *Service) SendIPFSFile(server agentserver.Agent_SendIPFSFileServer) erro
}
}
+// GetIPFSFile handles a client request to download a file from IPFS
func (s *Service) GetIPFSFile(req *agentserver.GetIPFSFileReq, server agentserver.Agent_GetIPFSFileServer) error {
log.WithField("FileHash", req.FileHash).Debugf("client download file")
- ipfsCli, err := stgglb.IPFSPool.Acquire()
+ ipfsCli, err := stgglb.IPFSPool.Acquire() // Acquire an IPFS client instance
if err != nil {
log.Warnf("new ipfs client: %s", err.Error())
return fmt.Errorf("new ipfs client: %w", err)
}
defer ipfsCli.Close()
- reader, err := ipfsCli.OpenRead(req.FileHash)
+ reader, err := ipfsCli.OpenRead(req.FileHash) // Open a read stream by file hash
if err != nil {
log.Warnf("open file %s to read failed, err: %s", req.FileHash, err.Error())
return fmt.Errorf("open file to read failed, err: %w", err)
@@ -106,7 +103,7 @@ func (s *Service) GetIPFSFile(req *agentserver.GetIPFSFileReq, server agentserve
buf := make([]byte, 1024)
readAllCnt := 0
for {
- readCnt, err := reader.Read(buf)
+ readCnt, err := reader.Read(buf) // Read data from IPFS
if readCnt > 0 {
readAllCnt += readCnt
@@ -121,18 +118,15 @@ func (s *Service) GetIPFSFile(req *agentserver.GetIPFSFileReq, server agentserve
}
}
- // File fully read
- if err == io.EOF {
+ if err == io.EOF { // When reading is finished, send the EOF flag and return
log.WithField("FileHash", req.FileHash).Debugf("send data size %d", readAllCnt)
- // Send the EOF message
server.Send(&agentserver.FileDataPacket{
Type: agentserver.StreamDataPacketType_EOF,
})
return nil
}
- // io.ErrUnexpectedEOF means EOF was hit before the buffer was filled; just send the remaining data as usual. Any error other than these two aborts the operation
- if err != nil && err != io.ErrUnexpectedEOF {
+ if err != nil && err != io.ErrUnexpectedEOF { // Abort on any error other than EOF and ErrUnexpectedEOF
log.Warnf("read file %s data failed, err: %s", req.FileHash, err.Error())
return fmt.Errorf("read file data failed, err: %w", err)
}
diff --git a/agent/internal/mq/agent.go b/agent/internal/mq/agent.go
index b442beb..b5b0db1 100644
--- a/agent/internal/mq/agent.go
+++ b/agent/internal/mq/agent.go
@@ -8,22 +8,34 @@ import (
agtmq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/agent"
)
+// GetState reports the state of the IPFS node
+// Parameters:
+// msg: the GetState message carrying the request information
+// Returns:
+// *agtmq.GetStateResp: the GetStateResp message carrying the response
+// *mq.CodeMessage: error code and message
func (svc *Service) GetState(msg *agtmq.GetState) (*agtmq.GetStateResp, *mq.CodeMessage) {
var ipfsState string
+ // Try to acquire a client instance from the IPFS pool
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
+ // If acquisition fails, log a warning and mark the IPFS state as unavailable
logger.Warnf("new ipfs client: %s", err.Error())
ipfsState = consts.IPFSStateUnavailable
} else {
+ // If acquisition succeeds, check whether the IPFS node is healthy
if ipfsCli.IsUp() {
ipfsState = consts.IPFSStateOK
} else {
+ // If the node is unhealthy, mark the IPFS state as unavailable
ipfsState = consts.IPFSStateUnavailable
}
+ // Release the IPFS client instance
ipfsCli.Close()
}
+ // Build and return the response
return mq.ReplyOK(agtmq.NewGetStateResp(ipfsState))
}
diff --git a/agent/internal/mq/cache.go b/agent/internal/mq/cache.go
index fd18684..c723ab7 100644
--- a/agent/internal/mq/cache.go
+++ b/agent/internal/mq/cache.go
@@ -12,81 +12,93 @@ import (
agtmq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/agent"
)
+// CheckCache checks the IPFS cache
+// Parameter msg: the request describing the cache check
+// Returns: the cache-check response and an error message
func (svc *Service) CheckCache(msg *agtmq.CheckCache) (*agtmq.CheckCacheResp, *mq.CodeMessage) {
- ipfsCli, err := stgglb.IPFSPool.Acquire()
+ ipfsCli, err := stgglb.IPFSPool.Acquire() // Try to acquire a client from the IPFS pool
if err != nil {
logger.Warnf("new ipfs client: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "new ipfs client failed")
}
- defer ipfsCli.Close()
+ defer ipfsCli.Close() // Make sure the IPFS client is closed properly
- files, err := ipfsCli.GetPinnedFiles()
+ files, err := ipfsCli.GetPinnedFiles() // Get the list of files pinned on IPFS
if err != nil {
logger.Warnf("get pinned files from ipfs failed, err: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get pinned files from ipfs failed")
}
- return mq.ReplyOK(agtmq.NewCheckCacheResp(lo.Keys(files)))
+ return mq.ReplyOK(agtmq.NewCheckCacheResp(lo.Keys(files))) // Return the keys of the file list
}
+// CacheGC performs cache garbage collection
+// Parameter msg: the request describing the garbage collection
+// Returns: the garbage-collection response and an error message
func (svc *Service) CacheGC(msg *agtmq.CacheGC) (*agtmq.CacheGCResp, *mq.CodeMessage) {
- ipfsCli, err := stgglb.IPFSPool.Acquire()
+ ipfsCli, err := stgglb.IPFSPool.Acquire() // Try to acquire a client from the IPFS pool
if err != nil {
logger.Warnf("new ipfs client: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "new ipfs client failed")
}
- defer ipfsCli.Close()
+ defer ipfsCli.Close() // Make sure the IPFS client is closed properly
- files, err := ipfsCli.GetPinnedFiles()
+ files, err := ipfsCli.GetPinnedFiles() // Get the list of files pinned on IPFS
if err != nil {
logger.Warnf("get pinned files from ipfs failed, err: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get pinned files from ipfs failed")
}
- // Unpin every file that is not recorded in the metadata
+ // Compare the currently pinned files against the request and unpin files not recorded in the metadata
shouldPinnedFiles := lo.SliceToMap(msg.PinnedFileHashes, func(hash string) (string, bool) { return hash, true })
for hash := range files {
if !shouldPinnedFiles[hash] {
- ipfsCli.Unpin(hash)
+ ipfsCli.Unpin(hash) // Unpin the file
logger.WithField("FileHash", hash).Debugf("unpinned by gc")
}
}
- return mq.ReplyOK(agtmq.RespCacheGC())
+ return mq.ReplyOK(agtmq.RespCacheGC()) // Return the garbage-collection-complete response
}
+// StartCacheMovePackage starts a cache-move-package task
+// Parameter msg: the request describing the cache move to start
+// Returns: the start-cache-move response and an error message
func (svc *Service) StartCacheMovePackage(msg *agtmq.StartCacheMovePackage) (*agtmq.StartCacheMovePackageResp, *mq.CodeMessage) {
- tsk := svc.taskManager.StartNew(mytask.NewCacheMovePackage(msg.UserID, msg.PackageID))
- return mq.ReplyOK(agtmq.NewStartCacheMovePackageResp(tsk.ID()))
+ tsk := svc.taskManager.StartNew(mytask.NewCacheMovePackage(msg.UserID, msg.PackageID)) // Start a new cache-move task
+ return mq.ReplyOK(agtmq.NewStartCacheMovePackageResp(tsk.ID())) // Return the task ID
}
+// WaitCacheMovePackage waits for a cache-move-package task to finish
+// Parameter msg: the request describing the wait
+// Returns: the wait-cache-move response and an error message
func (svc *Service) WaitCacheMovePackage(msg *agtmq.WaitCacheMovePackage) (*agtmq.WaitCacheMovePackageResp, *mq.CodeMessage) {
- tsk := svc.taskManager.FindByID(msg.TaskID)
+ tsk := svc.taskManager.FindByID(msg.TaskID) // Look up the task by ID
if tsk == nil {
- return nil, mq.Failed(errorcode.TaskNotFound, "task not found")
+ return nil, mq.Failed(errorcode.TaskNotFound, "task not found") // 如果任务不存在,返回错误
}
if msg.WaitTimeoutMs == 0 {
- tsk.Wait()
+ tsk.Wait() // Wait for the task to finish
errMsg := ""
if tsk.Error() != nil {
- errMsg = tsk.Error().Error()
+ errMsg = tsk.Error().Error() // Collect the task's error message
}
- return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(true, errMsg))
+ return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(true, errMsg)) // Return the completion status and error message
} else {
- if tsk.WaitTimeout(time.Duration(msg.WaitTimeoutMs) * time.Millisecond) {
+ if tsk.WaitTimeout(time.Duration(msg.WaitTimeoutMs) * time.Millisecond) { // Wait with a timeout
errMsg := ""
if tsk.Error() != nil {
- errMsg = tsk.Error().Error()
+ errMsg = tsk.Error().Error() // Collect the task's error message
}
- return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(true, errMsg))
+ return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(true, errMsg)) // Return the completion status and error message
}
- return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(false, ""))
+ return mq.ReplyOK(agtmq.NewWaitCacheMovePackageResp(false, "")) // 返回等待超时状态
}
}
diff --git a/agent/internal/mq/io.go b/agent/internal/mq/io.go
index 86e8e74..a5adac2 100644
--- a/agent/internal/mq/io.go
+++ b/agent/internal/mq/io.go
@@ -11,6 +11,9 @@ import (
agtmq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/agent"
)
+// SetupIOPlan sets up an I/O plan.
+// msg: message carrying the I/O plan.
+// Returns: the response message on success, or an error code and message on failure.
func (svc *Service) SetupIOPlan(msg *agtmq.SetupIOPlan) (*agtmq.SetupIOPlanResp, *mq.CodeMessage) {
err := svc.sw.SetupPlan(msg.Plan)
if err != nil {
@@ -21,11 +24,17 @@ func (svc *Service) SetupIOPlan(msg *agtmq.SetupIOPlan) (*agtmq.SetupIOPlanResp,
return mq.ReplyOK(agtmq.NewSetupIOPlanResp())
}
+// StartIOPlan starts an I/O plan.
+// msg: message carrying the I/O plan ID.
+// Returns: the task ID on success, or an error code and message on failure.
func (svc *Service) StartIOPlan(msg *agtmq.StartIOPlan) (*agtmq.StartIOPlanResp, *mq.CodeMessage) {
tsk := svc.taskManager.StartNew(mytask.NewExecuteIOPlan(msg.PlanID))
return mq.ReplyOK(agtmq.NewStartIOPlanResp(tsk.ID()))
}
+// WaitIOPlan waits for an I/O plan to finish.
+// msg: message carrying the task ID and wait timeout.
+// Returns: the completion status, error message and result on success, or an error code and message on failure.
func (svc *Service) WaitIOPlan(msg *agtmq.WaitIOPlan) (*agtmq.WaitIOPlanResp, *mq.CodeMessage) {
tsk := svc.taskManager.FindByID(msg.TaskID)
if tsk == nil {
@@ -59,6 +68,9 @@ func (svc *Service) WaitIOPlan(msg *agtmq.WaitIOPlan) (*agtmq.WaitIOPlanResp, *m
}
}
+// CancelIOPlan cancels an I/O plan.
+// msg: message carrying the ID of the I/O plan to cancel.
+// Returns: the response message on success, or an error code and message on failure.
func (svc *Service) CancelIOPlan(msg *agtmq.CancelIOPlan) (*agtmq.CancelIOPlanResp, *mq.CodeMessage) {
svc.sw.CancelPlan(msg.PlanID)
return mq.ReplyOK(agtmq.NewCancelIOPlanResp())
diff --git a/agent/internal/mq/object.go b/agent/internal/mq/object.go
index dbdaacc..ca963ca 100644
--- a/agent/internal/mq/object.go
+++ b/agent/internal/mq/object.go
@@ -8,21 +8,30 @@ import (
agtmq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/agent"
)
+// PinObject handles a request to pin objects.
+// msg: carries the file hashes of the objects to pin and a flag indicating whether this is a background task.
+// Return 1: the pin response on success.
+// Return 2: an error code and message on failure.
func (svc *Service) PinObject(msg *agtmq.PinObject) (*agtmq.PinObjectResp, *mq.CodeMessage) {
+ // Log the start of the pin operation
logger.WithField("FileHash", msg.FileHashes).Debugf("pin object")
+ // Start a new task to perform the IPFS pin
tsk := svc.taskManager.StartNew(task.NewIPFSPin(msg.FileHashes))
+ // If the task reports an error, log it and return an operation-failed response
if tsk.Error() != nil {
logger.WithField("FileHash", msg.FileHashes).
Warnf("pin object failed, err: %s", tsk.Error().Error())
return nil, mq.Failed(errorcode.OperationFailed, "pin object failed")
}
+ // For background tasks, return a success response immediately without waiting for completion
if msg.IsBackground {
return mq.ReplyOK(agtmq.RespPinObject())
}
+ // Wait for the task to finish
tsk.Wait()
return mq.ReplyOK(agtmq.RespPinObject())
}
diff --git a/agent/internal/mq/service.go b/agent/internal/mq/service.go
index 55789d4..e433697 100644
--- a/agent/internal/mq/service.go
+++ b/agent/internal/mq/service.go
@@ -5,11 +5,19 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch"
)
+// Service represents a message queue service
+// It contains two core components: the task manager and the IO switch
type Service struct {
- taskManager *task.Manager
- sw *ioswitch.Switch
+ taskManager *task.Manager // taskManager manages and schedules tasks
+ sw *ioswitch.Switch // sw controls IO switching
}
+// NewService creates a new message queue service instance
+// Parameters:
+// - taskMgr: the task manager, responsible for scheduling and managing tasks
+// - sw: the IO switch, used to control data input and output
+// Returns:
+// - *Service: a pointer to the created message queue service instance
func NewService(taskMgr *task.Manager, sw *ioswitch.Switch) *Service {
return &Service{
taskManager: taskMgr,
diff --git a/agent/internal/mq/storage.go b/agent/internal/mq/storage.go
index bdc006e..283da0d 100644
--- a/agent/internal/mq/storage.go
+++ b/agent/internal/mq/storage.go
@@ -23,51 +23,77 @@ import (
"gitlink.org.cn/cloudream/storage/common/utils"
)
+// StartStorageLoadPackage starts a storage-load-package task
+// Parameters:
+// - msg: message carrying what is needed to start the task, including the user ID, package ID and storage ID
+// Returns:
+// - *agtmq.StartStorageLoadPackageResp: the success response carrying the task ID
+// - *mq.CodeMessage: the error code and message when starting the task fails
func (svc *Service) StartStorageLoadPackage(msg *agtmq.StartStorageLoadPackage) (*agtmq.StartStorageLoadPackageResp, *mq.CodeMessage) {
+ // Start a new storage-load-package task in the task manager and obtain its ID
tsk := svc.taskManager.StartNew(mytask.NewStorageLoadPackage(msg.UserID, msg.PackageID, msg.StorageID))
+ // Build and return the success response carrying the task ID
return mq.ReplyOK(agtmq.NewStartStorageLoadPackageResp(tsk.ID()))
}
+// WaitStorageLoadPackage waits for a storage-load-package task to finish.
+//
+// Parameters:
+//
+// msg *agtmq.WaitStorageLoadPackage: message carrying the task ID and an optional wait timeout.
+//
+// Returns:
+//
+// *agtmq.WaitStorageLoadPackageResp: if the task is found and has finished (or timed out), the response includes whether it succeeded, the error message and the full output path.
+// *mq.CodeMessage: if the task is not found, the error code and message.
func (svc *Service) WaitStorageLoadPackage(msg *agtmq.WaitStorageLoadPackage) (*agtmq.WaitStorageLoadPackageResp, *mq.CodeMessage) {
- logger.WithField("TaskID", msg.TaskID).Debugf("wait loading package")
+ logger.WithField("TaskID", msg.TaskID).Debugf("wait loading package") // 记录等待加载包任务的debug信息
- tsk := svc.taskManager.FindByID(msg.TaskID)
+ tsk := svc.taskManager.FindByID(msg.TaskID) // Look up the task by ID
if tsk == nil {
- return nil, mq.Failed(errorcode.TaskNotFound, "task not found")
+ return nil, mq.Failed(errorcode.TaskNotFound, "task not found") // 如果任务未找到,返回任务未找到的错误信息
}
if msg.WaitTimeoutMs == 0 {
- tsk.Wait()
+ tsk.Wait() // No wait timeout was set, so wait indefinitely for the task to finish
- errMsg := ""
+ errMsg := "" // 初始化错误信息为空
if tsk.Error() != nil {
- errMsg = tsk.Error().Error()
+ errMsg = tsk.Error().Error() // If the task failed, record its error message
}
- loadTsk := tsk.Body().(*mytask.StorageLoadPackage)
-
- return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(true, errMsg, loadTsk.FullOutputPath))
+ loadTsk := tsk.Body().(*mytask.StorageLoadPackage) // Cast the task body to the storage-load-package type
+ return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(true, errMsg, loadTsk.FullOutputPath)) // Return the completion status, error message and full output path
} else {
+ // A wait timeout was set, so wait for the task to finish within the timeout
if tsk.WaitTimeout(time.Duration(msg.WaitTimeoutMs) * time.Millisecond) {
- errMsg := ""
+ errMsg := "" // 初始化错误信息为空
if tsk.Error() != nil {
- errMsg = tsk.Error().Error()
+ errMsg = tsk.Error().Error() // If the task failed, record its error message
}
- loadTsk := tsk.Body().(*mytask.StorageLoadPackage)
+ loadTsk := tsk.Body().(*mytask.StorageLoadPackage) // Cast the task body to the storage-load-package type
- return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(true, errMsg, loadTsk.FullOutputPath))
+ return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(true, errMsg, loadTsk.FullOutputPath)) // Return the completion status, error message and full output path
}
- return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(false, "", ""))
+ return mq.ReplyOK(agtmq.NewWaitStorageLoadPackageResp(false, "", "")) // 如果等待超时,返回任务未完成的状态
}
}
+// StorageCheck performs a storage check on the given directory
+// Parameters:
+// - msg: carries the storage directory to check
+// Returns:
+// - *agtmq.StorageCheckResp: the storage-check response, carrying the check result and the storage packages
+// - *mq.CodeMessage: the error message, nil if the operation succeeds
func (svc *Service) StorageCheck(msg *agtmq.StorageCheck) (*agtmq.StorageCheckResp, *mq.CodeMessage) {
+ // Try to read the given directory
infos, err := os.ReadDir(msg.Directory)
if err != nil {
+ // If reading the directory fails, log a warning and return the error with an empty package list
logger.Warnf("list storage directory failed, err: %s", err.Error())
return mq.ReplyOK(agtmq.NewStorageCheckResp(
err.Error(),
@@ -77,24 +103,31 @@ func (svc *Service) StorageCheck(msg *agtmq.StorageCheck) (*agtmq.StorageCheckRe
var stgPkgs []model.StoragePackage
+ // Filter out the subdirectories (user directories)
userDirs := lo.Filter(infos, func(info fs.DirEntry, index int) bool { return info.IsDir() })
for _, dir := range userDirs {
+ // Try to parse the subdirectory name as a user ID
userIDInt, err := strconv.ParseInt(dir.Name(), 10, 64)
if err != nil {
+ // If parsing fails, log a warning and continue with the next directory
logger.Warnf("parsing user id %s: %s", dir.Name(), err.Error())
continue
}
+ // Build the package directory path and read it
pkgDir := utils.MakeStorageLoadDirectory(msg.Directory, dir.Name())
pkgDirs, err := os.ReadDir(pkgDir)
if err != nil {
+ // If reading the directory fails, log a warning and continue with the next user directory
logger.Warnf("reading package dir %s: %s", pkgDir, err.Error())
continue
}
+ // Walk the packages in the package directory, parse their IDs and add them to the package list
for _, pkg := range pkgDirs {
pkgIDInt, err := strconv.ParseInt(pkg.Name(), 10, 64)
if err != nil {
+ // If parsing fails, log a warning and continue with the next package
logger.Warnf("parsing package dir %s: %s", pkg.Name(), err.Error())
continue
}
@@ -107,17 +140,31 @@ func (svc *Service) StorageCheck(msg *agtmq.StorageCheck) (*agtmq.StorageCheckRe
}
}
+ // Return the successful storage-check response carrying the package list
return mq.ReplyOK(agtmq.NewStorageCheckResp(consts.StorageDirectoryStateOK, stgPkgs))
}
+// StorageGC performs storage garbage collection.
+// Based on the given directory and package information, it removes files and directories that are no longer needed.
+//
+// Parameters:
+//
+// msg *agtmq.StorageGC: carries the directory and packages to garbage-collect.
+//
+// Returns:
+//
+// *agtmq.StorageGCResp: the garbage-collection response.
+// *mq.CodeMessage: the error code and message if the operation fails.
func (svc *Service) StorageGC(msg *agtmq.StorageGC) (*agtmq.StorageGCResp, *mq.CodeMessage) {
+ // Try to list all files and directories under the given directory
infos, err := os.ReadDir(msg.Directory)
if err != nil {
+ // If listing fails, log it and return an operation-failed response
logger.Warnf("list storage directory failed, err: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "list directory files failed")
}
- // userID->pkgID->pkg
+ // Build a map from user ID to package ID so we know which packages must be kept
userPkgs := make(map[string]map[string]bool)
for _, pkg := range msg.Packages {
userIDStr := fmt.Sprintf("%d", pkg.UserID)
@@ -132,10 +179,11 @@ func (svc *Service) StorageGC(msg *agtmq.StorageGC) (*agtmq.StorageGCResp, *mq.C
pkgs[pkgIDStr] = true
}
+ // Filter out the directory entries and iterate over them
userDirs := lo.Filter(infos, func(info fs.DirEntry, index int) bool { return info.IsDir() })
for _, dir := range userDirs {
pkgMap, ok := userPkgs[dir.Name()]
- // The first-level directory name is the UserID; first delete folders whose UserID never appears in the StoragePackage table
+ // If the current directory is not in the map of packages to keep, delete it
if !ok {
rmPath := filepath.Join(msg.Directory, dir.Name())
err := os.RemoveAll(rmPath)
@@ -147,8 +195,8 @@ func (svc *Service) StorageGC(msg *agtmq.StorageGC) (*agtmq.StorageGCResp, *mq.C
continue
}
+ // Walk the packages directory under each user directory and delete packages not in the keep map
pkgDir := utils.MakeStorageLoadDirectory(msg.Directory, dir.Name())
- // Walk the contents of the packages directory of each UserID directory
pkgs, err := os.ReadDir(pkgDir)
if err != nil {
logger.Warnf("reading package dir %s: %s", pkgDir, err.Error())
@@ -168,28 +216,47 @@ func (svc *Service) StorageGC(msg *agtmq.StorageGC) (*agtmq.StorageGCResp, *mq.C
}
}
+ // Garbage collection finished; return a success response
return mq.ReplyOK(agtmq.RespStorageGC())
}
+// StartStorageCreatePackage starts a task that creates a package from storage.
+// It takes a start-storage-create-package message and returns a task response or an error message.
+//
+// Parameters:
+//
+// msg *agtmq.StartStorageCreatePackage - message carrying what is needed to create the package.
+//
+// Returns:
+//
+// *agtmq.StartStorageCreatePackageResp - the success response carrying the task ID.
+// *mq.CodeMessage - the error message returned when creating the task fails.
func (svc *Service) StartStorageCreatePackage(msg *agtmq.StartStorageCreatePackage) (*agtmq.StartStorageCreatePackageResp, *mq.CodeMessage) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
+ // If acquiring the client fails, log a warning and return an error
logger.Warnf("new coordinator client: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "new coordinator client failed")
}
+ // Make sure the coordinator MQ client is released when the function returns
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Get the storage information
getStgResp, err := coorCli.GetStorageInfo(coormq.NewGetStorageInfo(msg.UserID, msg.StorageID))
if err != nil {
+ // If getting the storage information fails, log a warning and return an error
logger.WithField("StorageID", msg.StorageID).
Warnf("getting storage info: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get storage info failed")
}
+ // Compute the full path of the files to package
fullPath := filepath.Clean(filepath.Join(getStgResp.Directory, msg.Path))
+ // Walk the directory and collect the paths of all files to upload
var uploadFilePathes []string
err = filepath.WalkDir(fullPath, func(fname string, fi os.DirEntry, err error) error {
if err != nil {
@@ -203,32 +270,52 @@ func (svc *Service) StartStorageCreatePackage(msg *agtmq.StartStorageCreatePacka
return nil
})
if err != nil {
+ // If reading the directory fails, log a warning and return an error
logger.Warnf("opening directory %s: %s", fullPath, err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "read directory failed")
}
+ // Create the iterator over the objects to upload
objIter := iterator.NewUploadingObjectIterator(fullPath, uploadFilePathes)
+ // Start a new task to create the package
tsk := svc.taskManager.StartNew(mytask.NewCreatePackage(msg.UserID, msg.BucketID, msg.Name, objIter, msg.NodeAffinity))
+ // Return the success response for the task
return mq.ReplyOK(agtmq.NewStartStorageCreatePackageResp(tsk.ID()))
}
+// WaitStorageCreatePackage waits for a storage-create-package task.
+//
+// Parameters:
+// msg: message carrying the task ID and wait timeout.
+//
+// Returns:
+// A task response and an error message. If the task is found and has not timed out, the task's result is returned; if the task is not found or times out, the corresponding error is returned.
func (svc *Service) WaitStorageCreatePackage(msg *agtmq.WaitStorageCreatePackage) (*agtmq.WaitStorageCreatePackageResp, *mq.CodeMessage) {
+ // Look up the task by ID
tsk := svc.taskManager.FindByID(msg.TaskID)
if tsk == nil {
+ // If the task is not found, return a task-not-found error
return nil, mq.Failed(errorcode.TaskNotFound, "task not found")
}
+ // Wait according to the configured timeout
if msg.WaitTimeoutMs == 0 {
+ // If no timeout was set, wait indefinitely
tsk.Wait()
} else if !tsk.WaitTimeout(time.Duration(msg.WaitTimeoutMs) * time.Millisecond) {
+ // If a timeout was set and the task did not finish in time, return the timed-out result
return mq.ReplyOK(agtmq.NewWaitStorageCreatePackageResp(false, "", 0))
}
+ // Check whether the task failed
if tsk.Error() != nil {
+ // If the task failed, return its error message
return mq.ReplyOK(agtmq.NewWaitStorageCreatePackageResp(true, tsk.Error().Error(), 0))
}
+ // Get the task result
taskBody := tsk.Body().(*mytask.CreatePackage)
+ // Return the successful task result
return mq.ReplyOK(agtmq.NewWaitStorageCreatePackageResp(true, "", taskBody.Result.PackageID))
}
diff --git a/agent/internal/task/cache_move_package.go b/agent/internal/task/cache_move_package.go
index a8667b5..6a0a6a5 100644
--- a/agent/internal/task/cache_move_package.go
+++ b/agent/internal/task/cache_move_package.go
@@ -13,11 +13,13 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// CacheMovePackage represents a cache-move-package task.
type CacheMovePackage struct {
- userID cdssdk.UserID
- packageID cdssdk.PackageID
+ userID cdssdk.UserID // user ID
+ packageID cdssdk.PackageID // package ID
}
+// NewCacheMovePackage creates a new cache-move-package task instance.
func NewCacheMovePackage(userID cdssdk.UserID, packageID cdssdk.PackageID) *CacheMovePackage {
return &CacheMovePackage{
userID: userID,
@@ -25,6 +27,10 @@ func NewCacheMovePackage(userID cdssdk.UserID, packageID cdssdk.PackageID) *Cach
}
}
+// Execute runs the cache-move-package task.
+// task: the task instance.
+// ctx: the task context.
+// complete: callback invoked when the task finishes.
func (t *CacheMovePackage) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
err := t.do(ctx)
complete(err, CompleteOption{
@@ -32,13 +38,14 @@ func (t *CacheMovePackage) Execute(task *task.Task[TaskContext], ctx TaskContext
})
}
+// do carries out the actual cache-move logic.
func (t *CacheMovePackage) do(ctx TaskContext) error {
log := logger.WithType[CacheMovePackage]("Task")
log.Debugf("begin with %v", logger.FormatStruct(t))
defer log.Debugf("end")
+ // Acquire the distributed lock to protect the operation
mutex, err := reqbuilder.NewBuilder().
- // Protect the decoded Object data
IPFS().Buzy(*stgglb.Local.NodeID).
MutexLock(ctx.distlock)
if err != nil {
@@ -46,24 +53,27 @@ func (t *CacheMovePackage) do(ctx TaskContext) error {
}
defer mutex.Unlock()
+ // Acquire a coordinator MQ client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Get the details of the objects in the package
getResp, err := coorCli.GetPackageObjectDetails(coormq.NewGetPackageObjectDetails(t.packageID))
if err != nil {
return fmt.Errorf("getting package object details: %w", err)
}
+ // Acquire an IPFS client
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
return fmt.Errorf("new ipfs client: %w", err)
}
defer ipfsCli.Close()
- // TODO Could be optimized; for example, rep-type objects could simply be pinned directly
+ // Iterate over and download the objects
objIter := iterator.NewDownloadObjectIterator(getResp.Objects, &iterator.DownloadContext{
Distlock: ctx.distlock,
})
@@ -79,12 +89,14 @@ func (t *CacheMovePackage) do(ctx TaskContext) error {
}
defer obj.File.Close()
+ // Add the object file to IPFS
_, err = ipfsCli.CreateFile(obj.File)
if err != nil {
return fmt.Errorf("creating ipfs file: %w", err)
}
}
+ // Notify the coordinator that the cache has been moved
_, err = coorCli.CachePackageMoved(coormq.NewCachePackageMoved(t.packageID, *stgglb.Local.NodeID))
if err != nil {
return fmt.Errorf("request to coordinator: %w", err)
diff --git a/agent/internal/task/create_package.go b/agent/internal/task/create_package.go
index 1ddd848..9492443 100644
--- a/agent/internal/task/create_package.go
+++ b/agent/internal/task/create_package.go
@@ -13,11 +13,15 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// CreatePackageResult defines the result of creating a package
+// It contains the package ID and the list of uploaded objects
type CreatePackageResult struct {
PackageID cdssdk.PackageID
Objects []cmd.ObjectUploadResult
}
+// CreatePackage defines the create-package task
+// It contains the user ID, bucket ID, package name, an iterator over the objects to upload, the node affinity and the task result
type CreatePackage struct {
userID cdssdk.UserID
bucketID cdssdk.BucketID
@@ -27,6 +31,13 @@ type CreatePackage struct {
Result *CreatePackageResult
}
+// NewCreatePackage creates a new CreatePackage instance
+// userID: user ID
+// bucketID: bucket ID
+// name: package name
+// objIter: iterator over the objects to upload
+// nodeAffinity: node affinity, specifying which node the package should be created on (optional)
+// Returns a pointer to the CreatePackage instance
func NewCreatePackage(userID cdssdk.UserID, bucketID cdssdk.BucketID, name string, objIter iterator.UploadingObjectIterator, nodeAffinity *cdssdk.NodeID) *CreatePackage {
return &CreatePackage{
userID: userID,
@@ -37,15 +48,23 @@ func NewCreatePackage(userID cdssdk.UserID, bucketID cdssdk.BucketID, name strin
}
}
+// Execute runs the create-package task
+// task: the task instance, carrying the task context
+// ctx: the task context, including the distributed lock and network connectivity
+// complete: callback invoked when the task finishes
func (t *CreatePackage) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
+ // Get the task logger
log := logger.WithType[CreatePackage]("Task")
+
log.Debugf("begin")
defer log.Debugf("end")
+ // Acquire a coordinator client from the MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
err = fmt.Errorf("new coordinator client: %w", err)
log.Warn(err.Error())
+ // Complete the task and set a removal delay
complete(err, CompleteOption{
RemovingDelay: time.Minute,
})
@@ -53,16 +72,19 @@ func (t *CreatePackage) Execute(task *task.Task[TaskContext], ctx TaskContext, c
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator to create the package
createResp, err := coorCli.CreatePackage(coordinator.NewCreatePackage(t.userID, t.bucketID, t.name))
if err != nil {
err = fmt.Errorf("creating package: %w", err)
log.Error(err.Error())
+ // Complete the task and set a removal delay
complete(err, CompleteOption{
RemovingDelay: time.Minute,
})
return
}
+ // Upload the objects in the package
uploadRet, err := cmd.NewUploadObjects(t.userID, createResp.PackageID, t.objIter, t.nodeAffinity).Execute(&cmd.UploadObjectsContext{
Distlock: ctx.distlock,
Connectivity: ctx.connectivity,
@@ -70,15 +92,18 @@ func (t *CreatePackage) Execute(task *task.Task[TaskContext], ctx TaskContext, c
if err != nil {
err = fmt.Errorf("uploading objects: %w", err)
log.Error(err.Error())
+ // Complete the task and set a removal delay
complete(err, CompleteOption{
RemovingDelay: time.Minute,
})
return
}
+ // Save the upload result into the task result
t.Result.PackageID = createResp.PackageID
t.Result.Objects = uploadRet.Objects
+ // Complete the task and set a removal delay
complete(nil, CompleteOption{
RemovingDelay: time.Minute,
})
diff --git a/agent/internal/task/execute_io_plan.go b/agent/internal/task/execute_io_plan.go
index 06f3ed7..2449768 100644
--- a/agent/internal/task/execute_io_plan.go
+++ b/agent/internal/task/execute_io_plan.go
@@ -9,37 +9,60 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch"
)
- // TODO Temporarily use a Task to wait for the Plan's execution progress
+// ExecuteIOPlan is the task that executes an I/O plan
+// It temporarily uses a Task to wait for the plan's execution progress
type ExecuteIOPlan struct {
- PlanID ioswitch.PlanID
- Result ioswitch.PlanResult
+ PlanID ioswitch.PlanID // plan ID
+ Result ioswitch.PlanResult // execution result
}
+// NewExecuteIOPlan creates a new ExecuteIOPlan instance
+// Parameters:
+//
+// planID: the ID of the I/O plan to execute
+//
+// Returns:
+//
+// *ExecuteIOPlan: a pointer to the newly created ExecuteIOPlan instance
func NewExecuteIOPlan(planID ioswitch.PlanID) *ExecuteIOPlan {
return &ExecuteIOPlan{
PlanID: planID,
}
}
+// Execute runs the I/O plan
+// Parameters:
+//
+// task: the task instance
+// ctx: the task execution context
+// complete: the completion callback
+//
+// Notes:
+//
+// This function executes the given I/O plan and reports its completion status through the callback
func (t *ExecuteIOPlan) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
+ // Set up the task logger
log := logger.WithType[ExecuteIOPlan]("Task")
log.Debugf("begin with %v", logger.FormatStruct(t))
- defer log.Debugf("end")
+ defer log.Debugf("end") // 确保日志记录任务结束
+ // Execute the I/O plan
ret, err := ctx.sw.ExecutePlan(t.PlanID)
if err != nil {
+ // Plan execution failed: log a warning and invoke the completion callback
err := fmt.Errorf("executing io plan: %w", err)
log.WithField("PlanID", t.PlanID).Warn(err.Error())
complete(err, CompleteOption{
- RemovingDelay: time.Minute,
+ RemovingDelay: time.Minute, // Set the delayed-removal option
})
return
}
+ // Plan executed successfully: store the result and invoke the completion callback
t.Result = ret
complete(nil, CompleteOption{
- RemovingDelay: time.Minute,
+ RemovingDelay: time.Minute, // Set the delayed-removal option
})
}
diff --git a/agent/internal/task/ipfs_pin.go b/agent/internal/task/ipfs_pin.go
index 622b45d..d11d059 100644
--- a/agent/internal/task/ipfs_pin.go
+++ b/agent/internal/task/ipfs_pin.go
@@ -9,23 +9,35 @@ import (
stgglb "gitlink.org.cn/cloudream/storage/common/globals"
)
+// IPFSPin is the task that pins files on IPFS.
type IPFSPin struct {
- FileHashes []string
+ FileHashes []string // FileHashes holds the hashes of the files to pin.
}
+// NewIPFSPin creates a new IPFSPin instance.
+// fileHashes is a slice of hashes of the files to pin.
+// Returns a pointer to the IPFSPin instance.
func NewIPFSPin(fileHashes []string) *IPFSPin {
return &IPFSPin{
FileHashes: fileHashes,
}
}
+// Execute runs the IPFSPin task.
+// It acquires an IPFS client and then pins every file hash in FileHashes.
+// task is a pointer to task.Task[TaskContext], the current task instance.
+// ctx is the context of the current task.
+// complete is a completion callback used for cleanup when the task ends, whether it succeeds or fails.
func (t *IPFSPin) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
+ // Log that the task has started.
log := logger.WithType[IPFSPin]("Task")
log.Debugf("begin with %v", logger.FormatStruct(t))
- defer log.Debugf("end")
+ defer log.Debugf("end") // 确保记录任务结束的信息。
+ // Try to acquire a client instance from the IPFS pool.
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
+ // If acquiring the client fails, report the failure through complete and set a removal delay.
err := fmt.Errorf("new ipfs client: %w", err)
log.Warn(err.Error())
@@ -34,11 +46,13 @@ func (t *IPFSPin) Execute(task *task.Task[TaskContext], ctx TaskContext, complet
})
return
}
- defer ipfsCli.Close()
+ defer ipfsCli.Close() // Make sure the IPFS client is released before the function returns.
+ // Iterate over the file hashes and try to pin each one.
for _, fileHash := range t.FileHashes {
err = ipfsCli.Pin(fileHash)
if err != nil {
+ // If pinning fails, report the failure through complete and set a removal delay.
err := fmt.Errorf("pin file failed, err: %w", err)
log.WithField("FileHash", fileHash).Warn(err.Error())
@@ -49,6 +63,7 @@ func (t *IPFSPin) Execute(task *task.Task[TaskContext], ctx TaskContext, complet
}
}
+ // All files were pinned successfully; report success through complete and set a removal delay.
complete(nil, CompleteOption{
RemovingDelay: time.Minute,
})
diff --git a/agent/internal/task/ipfs_read.go b/agent/internal/task/ipfs_read.go
index aab48bf..4fbe708 100644
--- a/agent/internal/task/ipfs_read.go
+++ b/agent/internal/task/ipfs_read.go
@@ -12,11 +12,13 @@ import (
stgglb "gitlink.org.cn/cloudream/storage/common/globals"
)
+// IPFSRead is the task that reads a file from IPFS
type IPFSRead struct {
- FileHash string
- LocalPath string
+ FileHash string // IPFS hash of the file
+ LocalPath string // local path to store the file
}
+// NewIPFSRead creates a new IPFS read task instance
func NewIPFSRead(fileHash string, localPath string) *IPFSRead {
return &IPFSRead{
FileHash: fileHash,
@@ -24,6 +26,9 @@ func NewIPFSRead(fileHash string, localPath string) *IPFSRead {
}
}
+// Compare reports whether this task is the same as another task
+// other: the task to compare against
+// Returns: true if the two tasks are the same, false otherwise
func (t *IPFSRead) Compare(other *Task) bool {
tsk, ok := other.Body().(*IPFSRead)
if !ok {
@@ -33,15 +38,23 @@ func (t *IPFSRead) Compare(other *Task) bool {
return t.FileHash == tsk.FileHash && t.LocalPath == tsk.LocalPath
}
+// Execute reads the file from IPFS and stores it locally
+// task: the task instance
+// ctx: the task context
+// complete: callback invoked when the task finishes
func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
+ // Set up logging
log := logger.WithType[IPFSRead]("Task")
log.Debugf("begin with %v", logger.FormatStruct(t))
defer log.Debugf("end")
+ // Determine the directory of the output file
outputFileDir := filepath.Dir(t.LocalPath)
+ // Create the output file's directory
err := os.MkdirAll(outputFileDir, os.ModePerm)
if err != nil {
+ // Handle failure to create the directory
err := fmt.Errorf("create output file directory %s failed, err: %w", outputFileDir, err)
log.WithField("LocalPath", t.LocalPath).Warn(err.Error())
@@ -51,8 +64,10 @@ func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, comple
return
}
+ // Create the output file
outputFile, err := os.Create(t.LocalPath)
if err != nil {
+ // Handle failure to create the output file
err := fmt.Errorf("create output file %s failed, err: %w", t.LocalPath, err)
log.WithField("LocalPath", t.LocalPath).Warn(err.Error())
@@ -63,8 +78,10 @@ func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, comple
}
defer outputFile.Close()
+ // Acquire an IPFS client
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
+ // Handle failure to acquire the IPFS client
err := fmt.Errorf("new ipfs client: %w", err)
log.Warn(err.Error())
@@ -75,8 +92,10 @@ func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, comple
}
defer ipfsCli.Close()
+ // Open the file in IPFS for reading
rd, err := ipfsCli.OpenRead(t.FileHash)
if err != nil {
+ // Handle failure to open the IPFS file
err := fmt.Errorf("read ipfs file failed, err: %w", err)
log.WithField("FileHash", t.FileHash).Warn(err.Error())
@@ -86,8 +105,10 @@ func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, comple
return
}
+ // Copy the IPFS file content into the local file
_, err = io.Copy(outputFile, rd)
if err != nil {
+ // Handle failure to copy the file
err := fmt.Errorf("copy ipfs file to local file failed, err: %w", err)
log.WithField("LocalPath", t.LocalPath).Warn(err.Error())
@@ -97,6 +118,7 @@ func (t *IPFSRead) Execute(task *task.Task[TaskContext], ctx TaskContext, comple
return
}
+ // Task finished; invoke the callback
complete(nil, CompleteOption{
RemovingDelay: time.Minute,
})
diff --git a/agent/internal/task/storage_load_package.go b/agent/internal/task/storage_load_package.go
index ba40773..0dc2907 100644
--- a/agent/internal/task/storage_load_package.go
+++ b/agent/internal/task/storage_load_package.go
@@ -25,6 +25,7 @@ import (
"gitlink.org.cn/cloudream/storage/common/utils"
)
+// StorageLoadPackage defines the storage-load-package task, holding the full output path and the related storage, package and user IDs.
type StorageLoadPackage struct {
FullOutputPath string
@@ -34,6 +35,11 @@ type StorageLoadPackage struct {
pinnedBlocks []stgmod.ObjectBlock
}
+// NewStorageLoadPackage creates a new StorageLoadPackage instance.
+// userID: user ID.
+// packageID: package ID.
+// storageID: storage ID.
+// Returns a pointer to the new StorageLoadPackage.
func NewStorageLoadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID, storageID cdssdk.StorageID) *StorageLoadPackage {
return &StorageLoadPackage{
userID: userID,
@@ -41,6 +47,12 @@ func NewStorageLoadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID, sto
storageID: storageID,
}
}
+
+// Execute runs the storage-load task.
+// task: the task instance.
+// ctx: the task context.
+// complete: the completion callback.
+// No return value.
func (t *StorageLoadPackage) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
err := t.do(task, ctx)
@@ -49,35 +61,45 @@ func (t *StorageLoadPackage) Execute(task *task.Task[TaskContext], ctx TaskConte
})
}
+// do carries out the actual storage-load process.
+// task: the task instance.
+// ctx: the task context.
+// Returns any error that occurs during execution.
func (t *StorageLoadPackage) do(task *task.Task[TaskContext], ctx TaskContext) error {
+ // Acquire a coordinator client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Acquire an IPFS client
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
return fmt.Errorf("new IPFS client: %w", err)
}
defer stgglb.IPFSPool.Release(ipfsCli)
+ // Get the storage information from the coordinator
getStgResp, err := coorCli.GetStorageInfo(coormq.NewGetStorageInfo(t.userID, t.storageID))
if err != nil {
return fmt.Errorf("request to coordinator: %w", err)
}
+ // Build the output directory path and create the directory
outputDirPath := utils.MakeStorageLoadPackagePath(getStgResp.Directory, t.userID, t.packageID)
if err = os.MkdirAll(outputDirPath, 0755); err != nil {
return fmt.Errorf("creating output directory: %w", err)
}
t.FullOutputPath = outputDirPath
+ // Get the details of the objects in the package
getObjectDetails, err := coorCli.GetPackageObjectDetails(coormq.NewGetPackageObjectDetails(t.packageID))
if err != nil {
return fmt.Errorf("getting package object details: %w", err)
}
+ // Acquire the mutex to ensure concurrency safety
mutex, err := reqbuilder.NewBuilder().
// Reserve the entry in advance
Metadata().StoragePackage().CreateOne(t.userID, t.storageID, t.packageID).
@@ -91,6 +113,7 @@ func (t *StorageLoadPackage) do(task *task.Task[TaskContext], ctx TaskContext) e
}
defer mutex.Unlock()
+ // Download each object
for _, obj := range getObjectDetails.Objects {
err := t.downloadOne(coorCli, ipfsCli, outputDirPath, obj)
if err != nil {
@@ -98,6 +121,7 @@ func (t *StorageLoadPackage) do(task *task.Task[TaskContext], ctx TaskContext) e
}
}
+ // Notify the coordinator that the package has been loaded into storage
_, err = coorCli.StoragePackageLoaded(coormq.NewStoragePackageLoaded(t.userID, t.storageID, t.packageID, t.pinnedBlocks))
if err != nil {
return fmt.Errorf("loading package to storage: %w", err)
@@ -107,11 +131,23 @@ func (t *StorageLoadPackage) do(task *task.Task[TaskContext], ctx TaskContext) e
return err
}
+// downloadOne downloads a single object according to its redundancy type.
+//
+// Parameters:
+// - coorCli: the coordinator client, used to communicate with the coordinator.
+// - ipfsCli: the IPFS pool client, used to interact with the IPFS network.
+// - dir: the target directory for the downloaded object.
+// - obj: the details of the object to download, including its path and redundancy type.
+//
+// Returns:
+// - error: any error encountered during the download.
func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.PoolClient, dir string, obj stgmod.ObjectDetail) error {
var file io.ReadCloser
+ // Choose a download strategy based on the object's redundancy type.
switch red := obj.Object.Redundancy.(type) {
case *cdssdk.NoneRedundancy:
+ // Download handling for objects with no redundancy or replication redundancy.
reader, err := t.downloadNoneOrRepObject(ipfsCli, obj)
if err != nil {
return fmt.Errorf("downloading object: %w", err)
@@ -119,6 +155,7 @@ func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.P
file = reader
case *cdssdk.RepRedundancy:
+ // Download handling for replication-redundancy objects.
reader, err := t.downloadNoneOrRepObject(ipfsCli, obj)
if err != nil {
return fmt.Errorf("downloading rep object: %w", err)
@@ -126,6 +163,7 @@ func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.P
file = reader
case *cdssdk.ECRedundancy:
+ // Download handling for erasure-coded (EC) objects.
reader, pinnedBlocks, err := t.downloadECObject(coorCli, ipfsCli, obj, red)
if err != nil {
return fmt.Errorf("downloading ec object: %w", err)
@@ -134,10 +172,12 @@ func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.P
t.pinnedBlocks = append(t.pinnedBlocks, pinnedBlocks...)
default:
+ // Return an error for unknown redundancy types.
return fmt.Errorf("unknow redundancy type: %v", myref.TypeOfValue(obj.Object.Redundancy))
}
- defer file.Close()
+ defer file.Close() // Make sure the file is closed before the function returns.
+ // Join the full file path and create the directory that will contain the file.
fullPath := filepath.Join(dir, obj.Object.Path)
lastDirPath := filepath.Dir(fullPath)
@@ -145,12 +185,14 @@ func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.P
return fmt.Errorf("creating object last dir: %w", err)
}
+ // Create the output file.
outputFile, err := os.Create(fullPath)
if err != nil {
return fmt.Errorf("creating object file: %w", err)
}
- defer outputFile.Close()
+ defer outputFile.Close() // Make sure the file is closed before the function returns.
+ // Write the downloaded content into the local file.
if _, err := io.Copy(outputFile, file); err != nil {
return fmt.Errorf("writting object to file: %w", err)
}
@@ -158,14 +200,25 @@ func (t *StorageLoadPackage) downloadOne(coorCli *coormq.Client, ipfsCli *ipfs.P
return nil
}
+// downloadNoneOrRepObject downloads an object with no redundancy or replication redundancy from the IPFS network.
+// It returns an error if the object does not exist on any node.
+//
+// Parameters:
+// - ipfsCli: pointer to the IPFS client pool, used to interact with the IPFS network.
+// - obj: the details of the object to download.
+//
+// Returns:
+// - io.ReadCloser: a reader over the downloaded file.
+// - error: an error if the download fails.
func (t *StorageLoadPackage) downloadNoneOrRepObject(ipfsCli *ipfs.PoolClient, obj stgmod.ObjectDetail) (io.ReadCloser, error) {
if len(obj.Blocks) == 0 && len(obj.PinnedAt) == 0 {
return nil, fmt.Errorf("no node has this object")
}
- // Regardless of whether it actually succeeds
+ // Pin the object's file hash locally, ignoring whether the pin actually succeeds
ipfsCli.Pin(obj.Object.FileHash)
+ // Try to open the object file for reading
file, err := ipfsCli.OpenRead(obj.Object.FileHash)
if err != nil {
return nil, err
@@ -174,23 +227,42 @@ func (t *StorageLoadPackage) downloadNoneOrRepObject(ipfsCli *ipfs.PoolClient, o
return file, nil
}
+// downloadECObject downloads an object stored with EC (erasure coding).
+// Based on the object's block information and the EC redundancy policy, it downloads the necessary blocks from the network and reconstructs the whole object.
+//
+// Parameters:
+// - coorCli: pointer to the coordinator client, used for coordination and communication between nodes.
+// - ipfsCli: pointer to the IPFS client pool, used to interact with the IPFS network.
+// - obj: the details of the object to download.
+// - ecRed: the detailed configuration of the EC redundancy policy.
+//
+// Returns:
+// - io.ReadCloser: a reader over the reconstructed object file.
+// - []stgmod.ObjectBlock: the list of object blocks that were pinned.
+// - error: an error if the download or reconstruction fails.
func (t *StorageLoadPackage) downloadECObject(coorCli *coormq.Client, ipfsCli *ipfs.PoolClient, obj stgmod.ObjectDetail, ecRed *cdssdk.ECRedundancy) (io.ReadCloser, []stgmod.ObjectBlock, error) {
+ // Sort the candidate download nodes by object information and node state to pick the best ones
allNodes, err := t.sortDownloadNodes(coorCli, obj)
if err != nil {
return nil, nil, err
}
+
+ // Compute the minimum-cost block-reading solution and the minimum-cost object-reading solution
bsc, blocks := t.getMinReadingBlockSolution(allNodes, ecRed.K)
osc, _ := t.getMinReadingObjectSolution(allNodes, ecRed.K)
+
+ // If reconstructing from blocks is cheaper, follow the block-reconstruction path
if bsc < osc {
var fileStrs []io.ReadCloser
+ // Initialize the RS (Reed-Solomon) codec
rs, err := ec.NewRs(ecRed.K, ecRed.N, ecRed.ChunkSize)
if err != nil {
return nil, nil, fmt.Errorf("new rs: %w", err)
}
+ // Pin each block that will be read and open a read stream for it
for i := range blocks {
- // Regardless of whether it actually succeeds
ipfsCli.Pin(blocks[i].Block.FileHash)
str, err := ipfsCli.OpenRead(blocks[i].Block.FileHash)
@@ -204,8 +276,10 @@ func (t *StorageLoadPackage) downloadECObject(coorCli *coormq.Client, ipfsCli *i
fileStrs = append(fileStrs, str)
}
+ // Convert the multiple file streams into a unified set of readers
fileReaders, filesCloser := myio.ToReaders(fileStrs)
+ // Prepare the information and variables needed to reconstruct the data
var indexes []int
var pinnedBlocks []stgmod.ObjectBlock
for _, b := range blocks {
@@ -218,6 +292,7 @@ func (t *StorageLoadPackage) downloadECObject(coorCli *coormq.Client, ipfsCli *i
})
}
+ // Reconstruct the data and wrap the recovered streams as a ReadCloser
outputs, outputsCloser := myio.ToReaders(rs.ReconstructData(fileReaders, indexes))
return myio.AfterReadClosed(myio.Length(myio.ChunkedJoin(outputs, int(ecRed.ChunkSize)), obj.Object.Size), func(c io.ReadCloser) {
filesCloser()
@@ -225,12 +300,11 @@ func (t *StorageLoadPackage) downloadECObject(coorCli *coormq.Client, ipfsCli *i
}), pinnedBlocks, nil
}
- // bsc >= osc,如果osc是MaxFloat64,那么bsc也一定是,也就意味着没有足够块来恢复文件
+ // 如果通过对象恢复更高效或没有足够的块来恢复文件,则直接尝试读取对象文件
if osc == math.MaxFloat64 {
return nil, nil, fmt.Errorf("no enough blocks to reconstruct the file, want %d, get only %d", ecRed.K, len(blocks))
}
- // When the file is read directly, there is no need to pin the file blocks
str, err := ipfsCli.OpenRead(obj.Object.FileHash)
return str, nil, err
}
@@ -242,7 +316,15 @@ type downloadNodeInfo struct {
Distance float64
}
+// sortDownloadNodes sorts the candidate download nodes for a stored object
+// Parameters:
+// - coorCli *coormq.Client: the coordinator client, used to fetch node information
+// - obj stgmod.ObjectDetail: the details of the stored object, including its pinned nodes and block information
+// Returns:
+// - []*downloadNodeInfo: the sorted download node information
+// - error: an error if something goes wrong
func (t *StorageLoadPackage) sortDownloadNodes(coorCli *coormq.Client, obj stgmod.ObjectDetail) ([]*downloadNodeInfo, error) {
+ // Collect the IDs of the nodes the object is pinned on and the nodes holding its blocks
var nodeIDs []cdssdk.NodeID
for _, id := range obj.PinnedAt {
if !lo.Contains(nodeIDs, id) {
@@ -255,11 +337,13 @@ func (t *StorageLoadPackage) sortDownloadNodes(coorCli *coormq.Client, obj stgmo
}
}
+ // Fetch the node information
getNodes, err := coorCli.GetNodes(coormq.NewGetNodes(nodeIDs))
if err != nil {
return nil, fmt.Errorf("getting nodes: %w", err)
}
+ // Build a map of download node information
downloadNodeMap := make(map[cdssdk.NodeID]*downloadNodeInfo)
for _, id := range obj.PinnedAt {
node, ok := downloadNodeMap[id]
@@ -273,9 +357,10 @@ func (t *StorageLoadPackage) sortDownloadNodes(coorCli *coormq.Client, obj stgmo
downloadNodeMap[id] = node
}
- node.ObjectPinned = true
+ node.ObjectPinned = true // Mark the node as having the object pinned
}
+ // Fill in the information for each node holding a block and collect it into the map
for _, b := range obj.Blocks {
node, ok := downloadNodeMap[b.NodeID]
if !ok {
@@ -287,9 +372,10 @@ func (t *StorageLoadPackage) sortDownloadNodes(coorCli *coormq.Client, obj stgmo
downloadNodeMap[b.NodeID] = node
}
- node.Blocks = append(node.Blocks, b)
+ node.Blocks = append(node.Blocks, b) // Record the block information
}
+ // Sort the nodes by distance
return sort2.Sort(lo.Values(downloadNodeMap), func(left, right *downloadNodeInfo) int {
return sort2.Cmp(left.Distance, right.Distance)
}), nil
@@ -300,12 +386,19 @@ type downloadBlock struct {
Block stgmod.ObjectBlock
}
+// getMinReadingBlockSolution finds the minimum-cost solution for reading blocks
+// sortedNodes: the sorted node list, where each node holds several blocks
+// k: the number of blocks required
+// Returns: the total distance of the chosen blocks and the block list
func (t *StorageLoadPackage) getMinReadingBlockSolution(sortedNodes []*downloadNodeInfo, k int) (float64, []downloadBlock) {
+ // Initialize the bitmap of acquired blocks and the distance
gotBlocksMap := bitmap.Bitmap64(0)
var gotBlocks []downloadBlock
dist := float64(0.0)
+ // Iterate over all nodes and their blocks until k distinct blocks are collected
for _, n := range sortedNodes {
for _, b := range n.Blocks {
+ // If the block has not been collected yet, add it to the list and update the distance
if !gotBlocksMap.Get(b.Index) {
gotBlocks = append(gotBlocks, downloadBlock{
Node: n.Node,
@@ -315,18 +408,25 @@ func (t *StorageLoadPackage) getMinReadingBlockSolution(sortedNodes []*downloadN
dist += n.Distance
}
+ // Once k blocks have been collected, return the result
if len(gotBlocks) >= k {
return dist, gotBlocks
}
}
}
+ // If k distinct blocks cannot be collected, return the maximum distance and the blocks collected so far
return math.MaxFloat64, gotBlocks
}
+// getMinReadingObjectSolution finds the minimum-cost solution for reading the whole object
+// sortedNodes: the sorted node list, where each node records whether the object is pinned on it
+// k: the number of blocks that would otherwise be read, used to scale the distance for comparison
+// Returns: the minimum distance for reading the object and the corresponding node
func (t *StorageLoadPackage) getMinReadingObjectSolution(sortedNodes []*downloadNodeInfo, k int) (float64, *cdssdk.Node) {
dist := math.MaxFloat64
var downloadNode *cdssdk.Node
+ // Iterate over the nodes to find the one with the object pinned and the smallest distance
for _, n := range sortedNodes {
if n.ObjectPinned && float64(k)*n.Distance < dist {
dist = float64(k) * n.Distance
@@ -337,16 +437,22 @@ func (t *StorageLoadPackage) getMinReadingObjectSolution(sortedNodes []*download
return dist, downloadNode
}
+// getNodeDistance returns the distance to a node
+// node: the node whose distance is being computed
+// Returns: the distance between the node and the current node or location
func (t *StorageLoadPackage) getNodeDistance(node cdssdk.Node) float64 {
+ // If a local node ID exists and matches the target node ID, return the same-node distance
if stgglb.Local.NodeID != nil {
if node.NodeID == *stgglb.Local.NodeID {
return consts.NodeDistanceSameNode
}
}
+ // If the node's location matches the local location, return the same-location distance
if node.LocationID == stgglb.Local.LocationID {
return consts.NodeDistanceSameLocation
}
+ // Otherwise return the default distance for other nodes
return consts.NodeDistanceOther
}
diff --git a/agent/internal/task/task.go b/agent/internal/task/task.go
index 469c0d0..b0ed403 100644
--- a/agent/internal/task/task.go
+++ b/agent/internal/task/task.go
@@ -1,30 +1,35 @@
package task
import (
- "gitlink.org.cn/cloudream/common/pkgs/distlock"
- "gitlink.org.cn/cloudream/common/pkgs/task"
- "gitlink.org.cn/cloudream/storage/common/pkgs/connectivity"
- "gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch"
+ "gitlink.org.cn/cloudream/common/pkgs/distlock" // 引入分布式锁服务
+ "gitlink.org.cn/cloudream/common/pkgs/task" // 引入任务处理相关的包
+ "gitlink.org.cn/cloudream/storage/common/pkgs/connectivity" // 引入网络连接状态收集器
+ "gitlink.org.cn/cloudream/storage/common/pkgs/ioswitch" // 引入IO开关服务
)
+// TaskContext defines the execution context of a task, containing the distributed lock service, the IO switch and the network connectivity collector
type TaskContext struct {
distlock *distlock.Service
sw *ioswitch.Switch
connectivity *connectivity.Collector
}
-// Must be called explicitly after the Task ends; the completing function is invoked while the Manager holds its lock,
-// so it is a good place to set the execution result
+// CompleteFn is the function invoked when a task completes, used to set the task's execution result
type CompleteFn = task.CompleteFn
+// Manager is the task manager, used to create, manage and schedule tasks
type Manager = task.Manager[TaskContext]
+// TaskBody defines the task body, which holds the task's actual execution logic
type TaskBody = task.TaskBody[TaskContext]
+// Task represents a concrete task, holding its context, body and other related information
type Task = task.Task[TaskContext]
+// CompleteOption defines the options applied when a task completes, used to customize the completion handling
type CompleteOption = task.CompleteOption
+// NewManager creates and returns a new task manager; it requires the distributed lock service, the IO switch and the network connectivity collector
func NewManager(distlock *distlock.Service, sw *ioswitch.Switch, connectivity *connectivity.Collector) Manager {
return task.NewManager(TaskContext{
distlock: distlock,
diff --git a/agent/main.go b/agent/main.go
index b1aa82b..7e85e2c 100644
--- a/agent/main.go
+++ b/agent/main.go
@@ -31,38 +31,46 @@ import (
// TODO Does this data change at runtime?
var AgentIpList []string
+// Main program entry point
func main() {
- // TODO Read this from the configuration
+ // TODO: Move the agent IP list into the configuration file
AgentIpList = []string{"pcm01", "pcm1", "pcm2"}
+ // Initialize the configuration
err := config.Init()
if err != nil {
fmt.Printf("init config failed, err: %s", err.Error())
os.Exit(1)
}
+ // Initialize the logging system
err = log.Init(&config.Cfg().Logger)
if err != nil {
fmt.Printf("init logger failed, err: %s", err.Error())
os.Exit(1)
}
+ // Initialize global state and the connection pools
stgglb.InitLocal(&config.Cfg().Local)
stgglb.InitMQPool(&config.Cfg().RabbitMQ)
stgglb.InitAgentRPCPool(&agtrpc.PoolConfig{})
stgglb.InitIPFSPool(&config.Cfg().IPFS)
- // 启动网络连通性检测,并就地检测一次
+ // Start connectivity probing and run one probe immediately
conCol := connectivity.NewCollector(&config.Cfg().Connectivity, func(collector *connectivity.Collector) {
log := log.WithField("Connectivity", "")
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
log.Warnf("acquire coordinator mq failed, err: %s", err.Error())
return
}
+
+ // Make sure the client is released before the callback returns
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Aggregate the connectivity data and report it to the coordinator
cons := collector.GetAll()
nodeCons := make([]cdssdk.NodeConnectivity, 0, len(cons))
for _, con := range cons {
@@ -87,21 +95,22 @@ func main() {
})
conCol.CollectInPlace()
+ // Initialize the distributed lock service
distlock, err := distlock.NewService(&config.Cfg().DistLock)
if err != nil {
log.Fatalf("new ipfs failed, err: %s", err.Error())
}
+ // Initialize the IO switch
sw := ioswitch.NewSwitch()
- //处置协调端、客户端命令(可多建几个)
+ // Start the task manager and related services
wg := sync.WaitGroup{}
wg.Add(4)
taskMgr := task.NewManager(distlock, &sw, &conCol)
// 启动命令服务器
- // TODO 需要设计AgentID持久化机制
agtSvr, err := agtmq.NewServer(cmdsvc.NewService(&taskMgr, &sw), config.Cfg().ID, &config.Cfg().RabbitMQ)
if err != nil {
log.Fatalf("new agent server failed, err: %s", err.Error())
@@ -112,7 +121,7 @@ func main() {
go serveAgentServer(agtSvr, &wg)
- //面向客户端收发数据
+ // Start the client-facing gRPC service
listenAddr := config.Cfg().GRPC.MakeListenAddress()
lis, err := net.Listen("tcp", listenAddr)
if err != nil {
@@ -123,11 +132,16 @@ func main() {
agtrpc.RegisterAgentServer(s, grpcsvc.NewService(&sw))
go serveGRPC(s, lis, &wg)
+ // Start the distributed lock service handler
go serveDistLock(distlock)
+ // Wait for all services to stop
wg.Wait()
}
+// serveAgentServer starts and serves a command server
+// server: the agtmq.Server to serve
+// wg: WaitGroup used to signal that the server has stopped
func serveAgentServer(server *agtmq.Server, wg *sync.WaitGroup) {
log.Info("start serving command server")
@@ -139,9 +153,13 @@ func serveAgentServer(server *agtmq.Server, wg *sync.WaitGroup) {
log.Info("command server stopped")
- wg.Done()
+ wg.Done() // signal that the command server has stopped
}
+// serveGRPC starts and serves a gRPC server
+// s: the grpc.Server to serve
+// lis: the network listener accepting gRPC requests
+// wg: WaitGroup used to signal that the server has stopped
func serveGRPC(s *grpc.Server, lis net.Listener, wg *sync.WaitGroup) {
log.Info("start serving grpc")
@@ -153,9 +171,11 @@ func serveGRPC(s *grpc.Server, lis net.Listener, wg *sync.WaitGroup) {
log.Info("grpc stopped")
- wg.Done()
+ wg.Done() // signal that the gRPC server has stopped
}
+// serveDistLock starts and serves the distributed lock service
+// svc: the distlock.Service to serve
func serveDistLock(svc *distlock.Service) {
log.Info("start serving distlock")
diff --git a/client/internal/cmdline/bucket.go b/client/internal/cmdline/bucket.go
index 208a449..b8dd0f6 100644
--- a/client/internal/cmdline/bucket.go
+++ b/client/internal/cmdline/bucket.go
@@ -7,16 +7,22 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// BucketListUserBuckets lists the buckets owned by the given user.
+// ctx: command context providing the required services and configuration.
+// Returns: an error if the operation fails.
func BucketListUserBuckets(ctx CommandContext) error {
userID := cdssdk.UserID(1)
+ // Fetch the bucket list for the given user ID
buckets, err := ctx.Cmdline.Svc.BucketSvc().GetUserBuckets(userID)
if err != nil {
return err
}
+ // Print the number of buckets found and the user ID
fmt.Printf("Find %d buckets for user %d:\n", len(buckets), userID)
+ // Build a table to display the bucket list
tb := table.NewWriter()
tb.AppendHeader(table.Row{"ID", "Name", "CreatorID"})
@@ -24,38 +30,55 @@ func BucketListUserBuckets(ctx CommandContext) error {
tb.AppendRow(table.Row{bucket.BucketID, bucket.Name, bucket.CreatorID})
}
+ // Render the bucket table
fmt.Print(tb.Render())
return nil
}
+// BucketCreateBucket creates a new bucket for the given user.
+// ctx: command context providing the required services and configuration.
+// bucketName: name of the new bucket.
+// Returns: an error if the operation fails.
func BucketCreateBucket(ctx CommandContext, bucketName string) error {
userID := cdssdk.UserID(1)
+ // Create the bucket and obtain its new ID
bucketID, err := ctx.Cmdline.Svc.BucketSvc().CreateBucket(userID, bucketName)
if err != nil {
return err
}
+ // Print a success message for the created bucket
fmt.Printf("Create bucket %s success, id: %d", bucketName, bucketID)
return nil
}
+// BucketDeleteBucket deletes the specified bucket.
+// ctx: command context providing the required services and configuration.
+// bucketID: ID of the bucket to delete.
+// Returns: an error if the operation fails.
func BucketDeleteBucket(ctx CommandContext, bucketID cdssdk.BucketID) error {
userID := cdssdk.UserID(1)
+ // Delete the specified bucket
err := ctx.Cmdline.Svc.BucketSvc().DeleteBucket(userID, bucketID)
if err != nil {
return err
}
+ // Print a success message for the deleted bucket
fmt.Printf("Delete bucket %d success ", bucketID)
return nil
}
+// Register the bucket commands
func init() {
+ // Register the command that lists a user's buckets
commands.MustAdd(BucketListUserBuckets, "bucket", "ls")
+ // Register the command that creates a bucket
commands.MustAdd(BucketCreateBucket, "bucket", "new")
+ // Register the command that deletes a bucket
commands.MustAdd(BucketDeleteBucket, "bucket", "delete")
}
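These cmdline init functions all map a prefix-word path in a command trie to a handler function. A rough, self-contained sketch of that idea; the map-based "trie" and dispatch below are invented for illustration and are much simpler than the real cmdtrie package:

package main

import (
	"fmt"
	"strings"
)

type handler func(args []string) error

var commands = map[string]handler{}

// mustAdd registers a handler under a prefix path such as "bucket ls".
func mustAdd(fn handler, prefixWords ...string) {
	commands[strings.Join(prefixWords, " ")] = fn
}

// dispatch finds the longest registered prefix and calls its handler with the rest of the args.
func dispatch(args []string) error {
	for i := len(args); i > 0; i-- {
		if fn, ok := commands[strings.Join(args[:i], " ")]; ok {
			return fn(args[i:])
		}
	}
	return fmt.Errorf("unknown command: %v", args)
}

func main() {
	mustAdd(func(args []string) error { fmt.Println("listing buckets"); return nil }, "bucket", "ls")
	_ = dispatch([]string{"bucket", "ls"})
}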
diff --git a/client/internal/cmdline/cache.go b/client/internal/cmdline/cache.go
index ac32e92..fb270bb 100644
--- a/client/internal/cmdline/cache.go
+++ b/client/internal/cmdline/cache.go
@@ -7,17 +7,25 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// CacheMovePackage moves a cached package to the specified node.
+// ctx: command context.
+// packageID: ID of the package to move.
+// nodeID: target node ID.
+// Returns: nil on success, otherwise an error.
func CacheMovePackage(ctx CommandContext, packageID cdssdk.PackageID, nodeID cdssdk.NodeID) error {
startTime := time.Now()
defer func() {
+ // Print the elapsed time
fmt.Printf("%v\n", time.Since(startTime).Seconds())
}()
+ // Start the cache-move task
taskID, err := ctx.Cmdline.Svc.CacheSvc().StartCacheMovePackage(1, packageID, nodeID)
if err != nil {
return fmt.Errorf("start cache moving package: %w", err)
}
+ // Poll until the cache move completes
for {
complete, err := ctx.Cmdline.Svc.CacheSvc().WaitCacheMovePackage(nodeID, taskID, time.Second*10)
if complete {
@@ -34,12 +42,20 @@ func CacheMovePackage(ctx CommandContext, packageID cdssdk.PackageID, nodeID cds
}
}
+// CacheRemovePackage removes the specified package from the cache.
+// ctx: command context.
+// packageID: ID of the package to remove.
+// nodeID: ID of the caching node.
+// Returns: nil on success, otherwise an error.
func CacheRemovePackage(ctx CommandContext, packageID cdssdk.PackageID, nodeID cdssdk.NodeID) error {
return ctx.Cmdline.Svc.CacheSvc().CacheRemovePackage(packageID, nodeID)
}
+// Register the cache commands
func init() {
+ // Register the cache move command
commands.Add(CacheMovePackage, "cache", "move")
+ // Register the cache remove command
commands.Add(CacheRemovePackage, "cache", "remove")
}
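CacheMovePackage shows the start-then-poll shape used by most commands in this patch: start an asynchronous task, then call a Wait API in a loop until it reports completion. A generic, self-contained sketch of that loop; startTask and waitTask are illustrative stand-ins, not the project's API:

package main

import (
	"errors"
	"fmt"
	"time"
)

// startTask and waitTask stand in for the Start*/Wait* MQ calls.
func startTask() (string, error) { return "task-1", nil }

var polls int

func waitTask(taskID string, timeout time.Duration) (complete bool, err error) {
	polls++
	return polls >= 3, nil // pretend the task finishes on the third poll
}

func run() error {
	taskID, err := startTask()
	if err != nil {
		return fmt.Errorf("start task: %w", err)
	}
	for {
		complete, err := waitTask(taskID, 10*time.Second)
		if complete {
			if err != nil {
				return fmt.Errorf("task finished with error: %w", err)
			}
			return nil
		}
		if err != nil {
			return errors.New("waiting for task failed")
		}
	}
}

func main() { fmt.Println(run()) }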
diff --git a/client/internal/cmdline/commandline.go b/client/internal/cmdline/commandline.go
index 2b71d93..3943b59 100644
--- a/client/internal/cmdline/commandline.go
+++ b/client/internal/cmdline/commandline.go
@@ -8,26 +8,36 @@ import (
"gitlink.org.cn/cloudream/storage/client/internal/services"
)
+// CommandContext carries the context shared by command-line handlers.
type CommandContext struct {
- Cmdline *Commandline
+ Cmdline *Commandline // the current Commandline instance
}
+// commands is the trie that stores every registered command and its metadata.
var commands cmdtrie.CommandTrie[CommandContext, error] = cmdtrie.NewCommandTrie[CommandContext, error]()
+// Commandline is the command-line object that wraps access to the internal services.
type Commandline struct {
- Svc *services.Service
+ Svc *services.Service // the internal service layer
}
+// NewCommandline creates a new Commandline instance.
+// svc: the internal service instance.
+// Returns: the initialized Commandline pointer and a possible error.
func NewCommandline(svc *services.Service) (*Commandline, error) {
return &Commandline{
Svc: svc,
}, nil
}
+// DispatchCommand parses and executes a command.
+// allArgs: all arguments from the command line.
+// It runs the matching command and exits the process on failure.
func (c *Commandline) DispatchCommand(allArgs []string) {
cmdCtx := CommandContext{
Cmdline: c,
}
+ // Execute the command and handle its result accordingly.
cmdErr, err := commands.Execute(cmdCtx, allArgs, cmdtrie.ExecuteOption{ReplaceEmptyArrayWithNil: true})
if err != nil {
fmt.Printf("execute command failed, err: %s", err.Error())
@@ -39,6 +49,11 @@ func (c *Commandline) DispatchCommand(allArgs []string) {
}
}
+// MustAddCmd adds a command to the command trie.
+// fn: the function that implements the command.
+// prefixWords: the command's prefix words.
+// Returns: nil, so it can be used in a package-level var declaration.
+// It panics if registration fails.
func MustAddCmd(fn any, prefixWords ...string) any {
commands.MustAdd(fn, prefixWords...)
return nil
diff --git a/client/internal/cmdline/distlock.go b/client/internal/cmdline/distlock.go
index 59a103c..73b792e 100644
--- a/client/internal/cmdline/distlock.go
+++ b/client/internal/cmdline/distlock.go
@@ -9,9 +9,14 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/distlock/lockprovider"
)
+// DistLockLock tries to acquire distributed locks.
+// ctx: command context with the services and configuration needed to run the command.
+// lockData: array of lock descriptors, each containing the lock path, name and target.
+// Returns: an error if acquiring the locks fails.
func DistLockLock(ctx CommandContext, lockData []string) error {
req := distlock.LockRequest{}
+ // Parse the lock descriptors and fill in the request.
for _, lock := range lockData {
l, err := parseOneLock(lock)
if err != nil {
@@ -21,6 +26,7 @@ func DistLockLock(ctx CommandContext, lockData []string) error {
req.Locks = append(req.Locks, l)
}
+ // Request the distributed locks.
reqID, err := ctx.Cmdline.Svc.DistLock.Acquire(req)
if err != nil {
return fmt.Errorf("acquire locks failed, err: %w", err)
@@ -31,9 +37,13 @@ func DistLockLock(ctx CommandContext, lockData []string) error {
return nil
}
+// parseOneLock parses a single lock descriptor.
+// lockData: the descriptor to parse, in the form "path/name@target".
+// Returns: the parsed lock and a possible error.
func parseOneLock(lockData string) (distlock.Lock, error) {
var lock distlock.Lock
+ // Split the descriptor into path, name and target.
fullPathAndTarget := strings.Split(lockData, "@")
if len(fullPathAndTarget) != 2 {
return lock, fmt.Errorf("lock data must contains lock path, name and target")
@@ -47,6 +57,7 @@ func parseOneLock(lockData string) (distlock.Lock, error) {
lock.Path = pathAndName[0 : len(pathAndName)-1]
lock.Name = pathAndName[len(pathAndName)-1]
+ // Parse the target string.
target := lockprovider.NewStringLockTarget()
comps := strings.Split(fullPathAndTarget[1], "/")
for _, comp := range comps {
@@ -58,11 +69,16 @@ func parseOneLock(lockData string) (distlock.Lock, error) {
return lock, nil
}
+// DistLockUnlock releases previously acquired distributed locks.
+// ctx: command context.
+// reqID: the request ID returned when the locks were acquired.
+// Returns: an error if releasing the locks fails.
func DistLockUnlock(ctx CommandContext, reqID string) error {
ctx.Cmdline.Svc.DistLock.Release(reqID)
return nil
}
+// Register the distributed-lock commands with the command-line tool.
func init() {
commands.MustAdd(DistLockLock, "distlock", "lock")
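parseOneLock expects descriptors of the form "path/name@target". A compact, self-contained sketch of that parsing step; the lock struct here is a simplification of the real distlock.Lock:

package main

import (
	"fmt"
	"strings"
)

// lock is a simplified stand-in for distlock.Lock.
type lock struct {
	Path    []string
	Name    string
	Targets []string
}

func parseOneLock(data string) (lock, error) {
	var l lock
	fullPathAndTarget := strings.Split(data, "@")
	if len(fullPathAndTarget) != 2 {
		return l, fmt.Errorf("lock data must contain lock path, name and target")
	}
	pathAndName := strings.Split(fullPathAndTarget[0], "/")
	if len(pathAndName) < 2 {
		return l, fmt.Errorf("lock data must contain lock path, name and target")
	}
	l.Path = pathAndName[:len(pathAndName)-1]
	l.Name = pathAndName[len(pathAndName)-1]
	l.Targets = strings.Split(fullPathAndTarget[1], "/")
	return l, nil
}

func main() {
	fmt.Println(parseOneLock("Metadata/Bucket/Write@bucket1"))
}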
diff --git a/client/internal/cmdline/object.go b/client/internal/cmdline/object.go
index 8c36a58..1090e79 100644
--- a/client/internal/cmdline/object.go
+++ b/client/internal/cmdline/object.go
@@ -10,20 +10,32 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/iterator"
)
+// Registered command function that handles object uploads.
+//
+// ctx: command context providing the required services and configuration.
+// packageID: ID of the package to upload into.
+// rootPath: root directory on the local filesystem containing the files to upload.
+// nodeAffinity: preferred node IDs; the upload may be assigned to one of these nodes.
+// Returns: any error encountered during execution.
var _ = MustAddCmd(func(ctx CommandContext, packageID cdssdk.PackageID, rootPath string, nodeAffinity []cdssdk.NodeID) error {
+ // Record the start time to measure execution time.
startTime := time.Now()
defer func() {
+ // Print the elapsed time.
fmt.Printf("%v\n", time.Since(startTime).Seconds())
}()
+ // Use a simulated/fixed user ID for now.
userID := cdssdk.UserID(1)
+ // Walk the root directory and collect the paths of the files to upload.
var uploadFilePathes []string
err := filepath.WalkDir(rootPath, func(fname string, fi os.DirEntry, err error) error {
if err != nil {
return nil
}
+ // Only add paths that are not directories.
if !fi.IsDir() {
uploadFilePathes = append(uploadFilePathes, fname)
}
@@ -31,24 +43,32 @@ var _ = MustAddCmd(func(ctx CommandContext, packageID cdssdk.PackageID, rootPath
return nil
})
if err != nil {
+ // Directory traversal failed.
return fmt.Errorf("open directory %s failed, err: %w", rootPath, err)
}
+ // Use the first entry of the affinity list as the preferred upload node.
var nodeAff *cdssdk.NodeID
if len(nodeAffinity) > 0 {
n := cdssdk.NodeID(nodeAffinity[0])
nodeAff = &n
}
+ // Create the uploading-object iterator.
objIter := iterator.NewUploadingObjectIterator(rootPath, uploadFilePathes)
+ // Start the upload task.
taskID, err := ctx.Cmdline.Svc.ObjectSvc().StartUploading(userID, packageID, objIter, nodeAff)
if err != nil {
+ // Starting the upload task failed.
return fmt.Errorf("update objects to package %d failed, err: %w", packageID, err)
}
+ // Poll until the upload task completes.
for {
+ // Check the upload status every 5 seconds.
complete, _, err := ctx.Cmdline.Svc.ObjectSvc().WaitUploading(taskID, time.Second*5)
if complete {
+ // Upload finished; check whether it ended with an error.
if err != nil {
return fmt.Errorf("uploading objects: %w", err)
}
@@ -56,6 +76,7 @@ var _ = MustAddCmd(func(ctx CommandContext, packageID cdssdk.PackageID, rootPath
return nil
}
+ // An error occurred while waiting.
if err != nil {
return fmt.Errorf("wait updating: %w", err)
}
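The upload command collects its file list with filepath.WalkDir. A minimal standard-library-only sketch of that collection step (unlike the command above, it surfaces traversal errors instead of ignoring them):

package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// collectFiles returns every regular file path under root.
func collectFiles(root string) ([]string, error) {
	var files []string
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err // surface traversal errors
		}
		if !d.IsDir() {
			files = append(files, path)
		}
		return nil
	})
	return files, err
}

func main() {
	files, err := collectFiles(".")
	if err != nil {
		fmt.Fprintln(os.Stderr, "walk failed:", err)
		return
	}
	fmt.Println(len(files), "files to upload")
}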
diff --git a/client/internal/cmdline/package.go b/client/internal/cmdline/package.go
index eb980fe..3db06cd 100644
--- a/client/internal/cmdline/package.go
+++ b/client/internal/cmdline/package.go
@@ -12,6 +12,16 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/iterator"
)
+// PackageListBucketPackages lists all packages in the specified bucket.
+//
+// Parameters:
+//
+// ctx - command context.
+// bucketID - bucket ID.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageListBucketPackages(ctx CommandContext, bucketID cdssdk.BucketID) error {
userID := cdssdk.UserID(1)
@@ -33,6 +43,17 @@ func PackageListBucketPackages(ctx CommandContext, bucketID cdssdk.BucketID) err
return nil
}
+// PackageDownloadPackage downloads all files of the specified package into a local directory.
+//
+// Parameters:
+//
+// ctx - command context.
+// packageID - package ID.
+// outputDir - output directory path.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageDownloadPackage(ctx CommandContext, packageID cdssdk.PackageID, outputDir string) error {
startTime := time.Now()
defer func() {
@@ -46,7 +67,7 @@ func PackageDownloadPackage(ctx CommandContext, packageID cdssdk.PackageID, outp
return fmt.Errorf("create output directory %s failed, err: %w", outputDir, err)
}
- // 下载文件
+ // Initialize the file download iterator
objIter, err := ctx.Cmdline.Svc.PackageSvc().DownloadPackage(userID, packageID)
if err != nil {
return fmt.Errorf("download object failed, err: %w", err)
@@ -98,6 +119,17 @@ func PackageDownloadPackage(ctx CommandContext, packageID cdssdk.PackageID, outp
return nil
}
+// PackageCreatePackage creates a new package in the specified bucket.
+//
+// Parameters:
+//
+// ctx - command context.
+// bucketID - bucket ID.
+// name - package name.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageCreatePackage(ctx CommandContext, bucketID cdssdk.BucketID, name string) error {
userID := cdssdk.UserID(1)
@@ -110,6 +142,16 @@ func PackageCreatePackage(ctx CommandContext, bucketID cdssdk.BucketID, name str
return nil
}
+// PackageDeletePackage deletes the specified package.
+//
+// Parameters:
+//
+// ctx - command context.
+// packageID - package ID.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageDeletePackage(ctx CommandContext, packageID cdssdk.PackageID) error {
userID := cdssdk.UserID(1)
err := ctx.Cmdline.Svc.PackageSvc().DeletePackage(userID, packageID)
@@ -119,6 +161,16 @@ func PackageDeletePackage(ctx CommandContext, packageID cdssdk.PackageID) error
return nil
}
+// PackageGetCachedNodes retrieves the cache-node information of the specified package.
+//
+// Parameters:
+//
+// ctx - command context.
+// packageID - package ID.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageGetCachedNodes(ctx CommandContext, packageID cdssdk.PackageID) error {
userID := cdssdk.UserID(1)
resp, err := ctx.Cmdline.Svc.PackageSvc().GetCachedNodes(userID, packageID)
@@ -129,6 +181,16 @@ func PackageGetCachedNodes(ctx CommandContext, packageID cdssdk.PackageID) error
return nil
}
+// PackageGetLoadedNodes retrieves the loaded-node information of the specified package.
+//
+// Parameters:
+//
+// ctx - command context.
+// packageID - package ID.
+//
+// Returns:
+//
+// error - any error that occurred during the operation.
func PackageGetLoadedNodes(ctx CommandContext, packageID cdssdk.PackageID) error {
userID := cdssdk.UserID(1)
nodeIDs, err := ctx.Cmdline.Svc.PackageSvc().GetLoadedNodes(userID, packageID)
@@ -139,6 +201,7 @@ func PackageGetLoadedNodes(ctx CommandContext, packageID cdssdk.PackageID) error
return nil
}
+// Register the package-related command-line commands.
func init() {
commands.MustAdd(PackageListBucketPackages, "pkg", "ls")
diff --git a/client/internal/cmdline/scanner.go b/client/internal/cmdline/scanner.go
index e5aa0a8..cbfff20 100644
--- a/client/internal/cmdline/scanner.go
+++ b/client/internal/cmdline/scanner.go
@@ -8,38 +8,57 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// parseScannerEventCmdTrie is a static command trie used to parse scanner event commands.
var parseScannerEventCmdTrie cmdtrie.StaticCommandTrie[any] = cmdtrie.NewStaticCommandTrie[any]()
+// ScannerPostEvent posts an event to the scanner.
+// ctx: command context.
+// args: command argument array.
+// Returns: an error if execution fails.
func ScannerPostEvent(ctx CommandContext, args []string) error {
+ // Try to parse the scanner event from the command arguments.
ret, err := parseScannerEventCmdTrie.Execute(args, cmdtrie.ExecuteOption{ReplaceEmptyArrayWithNil: true})
if err != nil {
+ // Parsing failed; return the error.
return fmt.Errorf("execute parsing event command failed, err: %w", err)
}
+ // Post the parsed event.
err = ctx.Cmdline.Svc.ScannerSvc().PostEvent(ret.(scevt.Event), false, false)
if err != nil {
+ // Posting the event failed; return the error.
return fmt.Errorf("post event to scanner failed, err: %w", err)
}
return nil
}
+// Register the scanner event commands with parseScannerEventCmdTrie.
func init() {
+ // Register the AgentCacheGC event.
parseScannerEventCmdTrie.MustAdd(scevt.NewAgentCacheGC, myreflect.TypeNameOf[scevt.AgentCacheGC]())
+ // Register the AgentCheckCache event.
parseScannerEventCmdTrie.MustAdd(scevt.NewAgentCheckCache, myreflect.TypeNameOf[scevt.AgentCheckCache]())
+ // Register the AgentCheckState event.
parseScannerEventCmdTrie.MustAdd(scevt.NewAgentCheckState, myreflect.TypeNameOf[scevt.AgentCheckState]())
+ // Register the AgentStorageGC event.
parseScannerEventCmdTrie.MustAdd(scevt.NewAgentStorageGC, myreflect.TypeNameOf[scevt.AgentStorageGC]())
+ // Register the AgentCheckStorage event.
parseScannerEventCmdTrie.MustAdd(scevt.NewAgentCheckStorage, myreflect.TypeNameOf[scevt.AgentCheckStorage]())
+ // Register the CheckPackage event.
parseScannerEventCmdTrie.MustAdd(scevt.NewCheckPackage, myreflect.TypeNameOf[scevt.CheckPackage]())
+ // Register the CheckPackageRedundancy event.
parseScannerEventCmdTrie.MustAdd(scevt.NewCheckPackageRedundancy, myreflect.TypeNameOf[scevt.CheckPackageRedundancy]())
+ // Register the CleanPinned event.
parseScannerEventCmdTrie.MustAdd(scevt.NewCleanPinned, myreflect.TypeNameOf[scevt.CleanPinned]())
+ // Register the ScannerPostEvent command with the command line.
commands.MustAdd(ScannerPostEvent, "scanner", "event")
}
diff --git a/client/internal/cmdline/serve.go b/client/internal/cmdline/serve.go
index 872ecd3..2e27c59 100644
--- a/client/internal/cmdline/serve.go
+++ b/client/internal/cmdline/serve.go
@@ -6,17 +6,24 @@ import (
"gitlink.org.cn/cloudream/storage/client/internal/http"
)
+// ServeHTTP starts the HTTP service.
+// ctx: command-line context carrying the service configuration.
+// args: command-line arguments; the first one optionally specifies the listen address.
+// Returns: an error if startup fails, otherwise nil.
func ServeHTTP(ctx CommandContext, args []string) error {
+ // 默认监听地址为":7890",如果提供了命令行参数,则使用参数指定的地址。
listenAddr := ":7890"
if len(args) > 0 {
listenAddr = args[0]
}
+ // Create a new HTTP server instance.
httpSvr, err := http.NewServer(listenAddr, ctx.Cmdline.Svc)
if err != nil {
return fmt.Errorf("new http server: %w", err)
}
+ // Start serving HTTP.
err = httpSvr.Serve()
if err != nil {
return fmt.Errorf("serving http: %w", err)
@@ -25,6 +32,7 @@ func ServeHTTP(ctx CommandContext, args []string) error {
return nil
}
+// Register the ServeHTTP command.
func init() {
commands.MustAdd(ServeHTTP, "serve", "http")
}
diff --git a/client/internal/cmdline/storage.go b/client/internal/cmdline/storage.go
index e2c65a6..13c6e20 100644
--- a/client/internal/cmdline/storage.go
+++ b/client/internal/cmdline/storage.go
@@ -7,17 +7,25 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// StorageLoadPackage loads the specified package onto a storage system.
+// ctx: command context providing the required services and configuration.
+// packageID: ID of the package to load.
+// storageID: ID of the target storage system.
+// Returns: any error encountered during execution.
func StorageLoadPackage(ctx CommandContext, packageID cdssdk.PackageID, storageID cdssdk.StorageID) error {
startTime := time.Now()
defer func() {
+ // Print the elapsed time
fmt.Printf("%v\n", time.Since(startTime).Seconds())
}()
+ // Start loading the package onto the storage system
nodeID, taskID, err := ctx.Cmdline.Svc.StorageSvc().StartStorageLoadPackage(1, packageID, storageID)
if err != nil {
return fmt.Errorf("start loading package to storage: %w", err)
}
+ // Poll until loading completes
for {
complete, fullPath, err := ctx.Cmdline.Svc.StorageSvc().WaitStorageLoadPackage(nodeID, taskID, time.Second*10)
if complete {
@@ -35,17 +43,27 @@ func StorageLoadPackage(ctx CommandContext, packageID cdssdk.PackageID, storageI
}
}
+// StorageCreatePackage creates a new package from files on the specified storage system and uploads it.
+// ctx: command context providing the required services and configuration.
+// bucketID: ID of the bucket the package will be created in.
+// name: name of the new package.
+// storageID: ID of the source storage system.
+// path: path of the package's files on the storage system.
+// Returns: any error encountered during execution.
func StorageCreatePackage(ctx CommandContext, bucketID cdssdk.BucketID, name string, storageID cdssdk.StorageID, path string) error {
startTime := time.Now()
defer func() {
+ // Print the elapsed time
fmt.Printf("%v\n", time.Since(startTime).Seconds())
}()
+ // Start creating and uploading the package from the storage system
nodeID, taskID, err := ctx.Cmdline.Svc.StorageSvc().StartStorageCreatePackage(1, bucketID, name, storageID, path, nil)
if err != nil {
return fmt.Errorf("start storage uploading package: %w", err)
}
+ // Poll until the upload completes
for {
complete, packageID, err := ctx.Cmdline.Svc.StorageSvc().WaitStorageCreatePackage(nodeID, taskID, time.Second*10)
if complete {
@@ -63,8 +81,11 @@ func StorageCreatePackage(ctx CommandContext, bucketID cdssdk.BucketID, name str
}
}
+// Register the load-package and create-package commands with the command-line parser.
func init() {
+ // Register the load-package command
commands.MustAdd(StorageLoadPackage, "stg", "pkg", "load")
+ // Register the create-package command
commands.MustAdd(StorageCreatePackage, "stg", "pkg", "new")
}
diff --git a/client/internal/config/config.go b/client/internal/config/config.go
index cf6d64a..62fa7d4 100644
--- a/client/internal/config/config.go
+++ b/client/internal/config/config.go
@@ -23,6 +23,8 @@ type Config struct {
var cfg Config
+// Init initializes the client configuration
+// TODO: it would be better to make the moduleName parameter configurable
func Init() error {
return config.DefaultLoad("client", &cfg)
}
diff --git a/client/internal/http/bucket.go b/client/internal/http/bucket.go
index 7e749b0..526c1a3 100644
--- a/client/internal/http/bucket.go
+++ b/client/internal/http/bucket.go
@@ -9,53 +9,69 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// BucketService handles bucket-related HTTP requests
type BucketService struct {
*Server
}
+// Bucket returns a BucketService instance
func (s *Server) Bucket() *BucketService {
return &BucketService{
Server: s,
}
}
+// Create creates a new bucket
+// ctx *gin.Context: gin context used to handle the HTTP request and response
func (s *BucketService) Create(ctx *gin.Context) {
log := logger.WithField("HTTP", "Bucket.Create")
var req cdssdk.BucketCreateReq
+ // Bind the JSON request body to the request struct
if err := ctx.ShouldBindJSON(&req); err != nil {
log.Warnf("binding body: %s", err.Error())
+ // Binding failed; return an error response
ctx.JSON(http.StatusBadRequest, Failed(errorcode.BadArgument, "missing argument or invalid argument"))
return
}
+ // Call the service layer to create the bucket
bucketID, err := s.svc.BucketSvc().CreateBucket(req.UserID, req.BucketName)
if err != nil {
log.Warnf("creating bucket: %s", err.Error())
+ // Creating the bucket failed; return an error response
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "create bucket failed"))
return
}
+ // Bucket created successfully; return a success response
ctx.JSON(http.StatusOK, OK(cdssdk.BucketCreateResp{
BucketID: bucketID,
}))
}
+// Delete deletes the specified bucket
+// ctx *gin.Context: gin context used to handle the HTTP request and response
func (s *BucketService) Delete(ctx *gin.Context) {
log := logger.WithField("HTTP", "Bucket.Delete")
var req cdssdk.BucketDeleteReq
+ // Bind the JSON request body to the request struct
if err := ctx.ShouldBindJSON(&req); err != nil {
log.Warnf("binding body: %s", err.Error())
+ // Binding failed; return an error response
ctx.JSON(http.StatusBadRequest, Failed(errorcode.BadArgument, "missing argument or invalid argument"))
return
}
+ // Call the service layer to delete the bucket
if err := s.svc.BucketSvc().DeleteBucket(req.UserID, req.BucketID); err != nil {
log.Warnf("deleting bucket: %s", err.Error())
+ // Deleting the bucket failed; return an error response
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "delete bucket failed"))
return
}
+ // Bucket deleted successfully; return a success response
ctx.JSON(http.StatusOK, OK(nil))
}
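The HTTP handlers in this file follow a bind-call-respond shape. A compact, self-contained gin example of that shape; the request type, route path and response fields are invented for the sketch:

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// createReq is an invented request type for the sketch.
type createReq struct {
	UserID     int64  `json:"userID" binding:"required"`
	BucketName string `json:"bucketName" binding:"required"`
}

func createBucket(ctx *gin.Context) {
	var req createReq
	// 1. Bind and validate the JSON body.
	if err := ctx.ShouldBindJSON(&req); err != nil {
		ctx.JSON(http.StatusBadRequest, gin.H{"error": "missing argument or invalid argument"})
		return
	}
	// 2. Call the service layer here (omitted in the sketch).
	// 3. Respond.
	ctx.JSON(http.StatusOK, gin.H{"bucketID": 1})
}

func main() {
	r := gin.Default()
	r.POST("/bucket/create", createBucket)
	_ = r.Run(":7890")
}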
diff --git a/client/internal/http/cache.go b/client/internal/http/cache.go
index ab86fda..fb89bed 100644
--- a/client/internal/http/cache.go
+++ b/client/internal/http/cache.go
@@ -10,26 +10,34 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// CacheService handles cache-related HTTP requests; it embeds Server
type CacheService struct {
*Server
}
+// Cache returns a CacheService instance
func (s *Server) Cache() *CacheService {
return &CacheService{
Server: s,
}
}
+// CacheMovePackageReq holds the parameters of a cache move request
type CacheMovePackageReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
PackageID *cdssdk.PackageID `json:"packageID" binding:"required"`
NodeID *cdssdk.NodeID `json:"nodeID" binding:"required"`
}
+
+// CacheMovePackageResp holds the response of a cache move request
type CacheMovePackageResp = cdssdk.CacheMovePackageResp
+// MovePackage handles a request to move a cached package
func (s *CacheService) MovePackage(ctx *gin.Context) {
+ // Initialize the logger
log := logger.WithField("HTTP", "Cache.LoadPackage")
+ // Bind the JSON request body
var req CacheMovePackageReq
if err := ctx.ShouldBindJSON(&req); err != nil {
log.Warnf("binding body: %s", err.Error())
@@ -37,6 +45,7 @@ func (s *CacheService) MovePackage(ctx *gin.Context) {
return
}
+ // Start the cache-move task
taskID, err := s.svc.CacheSvc().StartCacheMovePackage(*req.UserID, *req.PackageID, *req.NodeID)
if err != nil {
log.Warnf("start cache move package: %s", err.Error())
@@ -44,9 +53,12 @@ func (s *CacheService) MovePackage(ctx *gin.Context) {
return
}
+ // Poll until the cache move completes
for {
+ // Check whether the move has finished
complete, err := s.svc.CacheSvc().WaitCacheMovePackage(*req.NodeID, taskID, time.Second*10)
if complete {
+ // Handle completion of the move
if err != nil {
log.Warnf("moving complete with: %s", err.Error())
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "cache move package failed"))
@@ -57,6 +69,7 @@ func (s *CacheService) MovePackage(ctx *gin.Context) {
return
}
+ // Handle errors that occurred while waiting
if err != nil {
log.Warnf("wait moving: %s", err.Error())
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "cache move package failed"))
diff --git a/client/internal/http/node.go b/client/internal/http/node.go
index fa6d729..bd31119 100644
--- a/client/internal/http/node.go
+++ b/client/internal/http/node.go
@@ -9,37 +9,53 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// NodeService handles node-related HTTP requests; it embeds Server.
type NodeService struct {
*Server
}
+// NodeSvc returns a NodeService instance.
+// It is the entry point for accessing NodeService from a Server instance.
func (s *Server) NodeSvc() *NodeService {
return &NodeService{
Server: s,
}
}
+// GetNodesReq defines the parameters of a get-nodes request.
+// NodeIDs is the required list of node IDs to query.
type GetNodesReq struct {
NodeIDs *[]cdssdk.NodeID `form:"nodeIDs" binding:"required"`
}
+
+// GetNodesResp is identical to cdssdk.NodeGetNodesResp and describes the response of a get-nodes request.
type GetNodesResp = cdssdk.NodeGetNodesResp
+// GetNodes handles a request for node information.
+// It reads the query parameters from the gin context and returns the matching nodes.
+// ctx *gin.Context: the current HTTP request context.
func (s *ObjectService) GetNodes(ctx *gin.Context) {
+ // Initialize the logger with an "HTTP" field.
log := logger.WithField("HTTP", "Node.GetNodes")
var req GetNodesReq
+ // Bind the query parameters to the request struct; return an error response on failure.
if err := ctx.ShouldBindQuery(&req); err != nil {
log.Warnf("binding body: %s", err.Error())
+ // Binding failed; respond with 400 and an error message.
ctx.JSON(http.StatusBadRequest, Failed(errorcode.BadArgument, "missing argument or invalid argument"))
return
}
+ // Query the node information via NodeSvc; respond with an error on failure.
nodes, err := s.svc.NodeSvc().GetNodes(*req.NodeIDs)
if err != nil {
log.Warnf("getting nodes: %s", err.Error())
+ // Fetching the nodes failed; respond with an operation-failed error.
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "get nodes failed"))
return
}
+ // Success; respond with 200 and the node information.
ctx.JSON(http.StatusOK, OK(GetNodesResp{Nodes: nodes}))
}
diff --git a/client/internal/http/object.go b/client/internal/http/object.go
index d7dba18..651d74e 100644
--- a/client/internal/http/object.go
+++ b/client/internal/http/object.go
@@ -13,21 +13,25 @@ import (
myio "gitlink.org.cn/cloudream/common/utils/io"
)
+// ObjectService handles object-related HTTP requests
type ObjectService struct {
*Server
}
+// Object returns an ObjectService instance
func (s *Server) Object() *ObjectService {
return &ObjectService{
Server: s,
}
}
+// ObjectUploadReq defines the object upload request body
type ObjectUploadReq struct {
- Info cdssdk.ObjectUploadInfo `form:"info" binding:"required"`
- Files []*multipart.FileHeader `form:"files"`
+ Info cdssdk.ObjectUploadInfo `form:"info" binding:"required"` // upload metadata
+ Files []*multipart.FileHeader `form:"files"` // files to upload
}
+// Upload handles an object upload request
func (s *ObjectService) Upload(ctx *gin.Context) {
log := logger.WithField("HTTP", "Object.Upload")
@@ -38,18 +42,18 @@ func (s *ObjectService) Upload(ctx *gin.Context) {
return
}
- var err error
-
+ // Convert the multipart files into uploading objects
objIter := mapMultiPartFileToUploadingObject(req.Files)
+ // Start the upload task
taskID, err := s.svc.ObjectSvc().StartUploading(req.Info.UserID, req.Info.PackageID, objIter, req.Info.NodeAffinity)
-
if err != nil {
log.Warnf("start uploading object task: %s", err.Error())
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "start uploading task failed"))
return
}
+ // Wait for the upload task to finish
for {
complete, _, err := s.svc.ObjectSvc().WaitUploading(taskID, time.Second*5)
if complete {
@@ -58,7 +62,6 @@ func (s *ObjectService) Upload(ctx *gin.Context) {
ctx.JSON(http.StatusOK, Failed(errorcode.OperationFailed, "uploading object failed"))
return
}
-
ctx.JSON(http.StatusOK, OK(nil))
return
}
@@ -71,11 +74,13 @@ func (s *ObjectService) Upload(ctx *gin.Context) {
}
}
+// ObjectDownloadReq defines the object download request parameters
type ObjectDownloadReq struct {
- UserID *cdssdk.UserID `form:"userID" binding:"required"`
- ObjectID *cdssdk.ObjectID `form:"objectID" binding:"required"`
+ UserID *cdssdk.UserID `form:"userID" binding:"required"` // user ID
+ ObjectID *cdssdk.ObjectID `form:"objectID" binding:"required"` // object ID
}
+// Download handles an object download request
func (s *ObjectService) Download(ctx *gin.Context) {
log := logger.WithField("HTTP", "Object.Download")
@@ -86,6 +91,7 @@ func (s *ObjectService) Download(ctx *gin.Context) {
return
}
+ // Download the object
file, err := s.svc.ObjectSvc().Download(*req.UserID, *req.ObjectID)
if err != nil {
log.Warnf("downloading object: %s", err.Error())
@@ -93,11 +99,12 @@ func (s *ObjectService) Download(ctx *gin.Context) {
return
}
+ // Set the response headers for the file download
ctx.Writer.WriteHeader(http.StatusOK)
- // TODO 需要设置FileName
ctx.Header("Content-Disposition", "attachment; filename=filename")
ctx.Header("Content-Type", "application/octet-stream")
+ // Stream the file content back to the client
buf := make([]byte, 4096)
ctx.Stream(func(w io.Writer) bool {
rd, err := file.Read(buf)
@@ -124,12 +131,16 @@ func (s *ObjectService) Download(ctx *gin.Context) {
})
}
+// GetPackageObjectsReq defines the request parameters for listing a package's objects
type GetPackageObjectsReq struct {
- UserID *cdssdk.UserID `form:"userID" binding:"required"`
- PackageID *cdssdk.PackageID `form:"packageID" binding:"required"`
+ UserID *cdssdk.UserID `form:"userID" binding:"required"` // user ID
+ PackageID *cdssdk.PackageID `form:"packageID" binding:"required"` // package ID
}
+
+// GetPackageObjectsResp defines the response for listing a package's objects
type GetPackageObjectsResp = cdssdk.ObjectGetPackageObjectsResp
+// GetPackageObjects handles a request to list the objects in a package
func (s *ObjectService) GetPackageObjects(ctx *gin.Context) {
log := logger.WithField("HTTP", "Object.GetPackageObjects")
@@ -140,6 +151,7 @@ func (s *ObjectService) GetPackageObjects(ctx *gin.Context) {
return
}
+ // Fetch the object list of the package
objs, err := s.svc.ObjectSvc().GetPackageObjects(*req.UserID, *req.PackageID)
if err != nil {
log.Warnf("getting package objects: %s", err.Error())
@@ -147,5 +159,6 @@ func (s *ObjectService) GetPackageObjects(ctx *gin.Context) {
return
}
+ // Return the response
ctx.JSON(http.StatusOK, OK(GetPackageObjectsResp{Objects: objs}))
}
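The Download handler above streams the object with a manual read loop inside ctx.Stream. For comparison, a self-contained sketch that streams any io.Reader to a gin response with io.Copy; the file name and content are invented for the example:

package main

import (
	"io"
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
)

func download(ctx *gin.Context) {
	// In the real handler this reader would come from the object service.
	var file io.Reader = strings.NewReader("hello, object content")

	ctx.Header("Content-Disposition", `attachment; filename="object.bin"`)
	ctx.Header("Content-Type", "application/octet-stream")
	ctx.Status(http.StatusOK)

	// io.Copy streams the reader to the response without buffering it all in memory.
	if _, err := io.Copy(ctx.Writer, file); err != nil {
		// The status line has already been sent; record the error for gin's middleware.
		_ = ctx.Error(err)
	}
}

func main() {
	r := gin.Default()
	r.GET("/object/download", download)
	_ = r.Run(":7890")
}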
diff --git a/client/internal/http/package.go b/client/internal/http/package.go
index cdaf3e0..e512903 100644
--- a/client/internal/http/package.go
+++ b/client/internal/http/package.go
@@ -14,24 +14,30 @@ import (
stgiter "gitlink.org.cn/cloudream/storage/common/pkgs/iterator"
)
+// PackageService handles package-related HTTP requests.
type PackageService struct {
*Server
}
+// Package returns a PackageService instance.
func (s *Server) Package() *PackageService {
return &PackageService{
Server: s,
}
}
+// PackageGetReq holds the parameters for fetching package information.
type PackageGetReq struct {
UserID *cdssdk.UserID `form:"userID" binding:"required"`
PackageID *cdssdk.PackageID `form:"packageID" binding:"required"`
}
+
+// PackageGetResp holds the result of fetching package information.
type PackageGetResp struct {
model.Package
}
+// Get handles the HTTP request for fetching package information.
func (s *PackageService) Get(ctx *gin.Context) {
log := logger.WithField("HTTP", "Package.Get")
@@ -52,6 +58,7 @@ func (s *PackageService) Get(ctx *gin.Context) {
ctx.JSON(http.StatusOK, OK(PackageGetResp{Package: *pkg}))
}
+// Create handles the HTTP request for creating a new package.
func (s *PackageService) Create(ctx *gin.Context) {
log := logger.WithField("HTTP", "Package.Create")
var req cdssdk.PackageCreateReq
@@ -73,11 +80,13 @@ func (s *PackageService) Create(ctx *gin.Context) {
}))
}
+// PackageDeleteReq holds the parameters for deleting a package.
type PackageDeleteReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
PackageID *cdssdk.PackageID `json:"packageID" binding:"required"`
}
+// Delete handles the HTTP request for deleting a package.
func (s *PackageService) Delete(ctx *gin.Context) {
log := logger.WithField("HTTP", "Package.Delete")
@@ -98,14 +107,18 @@ func (s *PackageService) Delete(ctx *gin.Context) {
ctx.JSON(http.StatusOK, OK(nil))
}
+// GetCachedNodesReq holds the parameters for fetching a package's cache nodes.
type GetCachedNodesReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
PackageID *cdssdk.PackageID `json:"packageID" binding:"required"`
}
+
+// GetCachedNodesResp holds the result of fetching a package's cache nodes.
type GetCachedNodesResp struct {
cdssdk.PackageCachingInfo
}
+// GetCachedNodes handles the HTTP request for fetching a package's cache nodes.
func (s *PackageService) GetCachedNodes(ctx *gin.Context) {
log := logger.WithField("HTTP", "Package.GetCachedNodes")
@@ -126,15 +139,18 @@ func (s *PackageService) GetCachedNodes(ctx *gin.Context) {
ctx.JSON(http.StatusOK, OK(GetCachedNodesResp{resp}))
}
+// GetLoadedNodesReq holds the parameters for fetching a package's loaded nodes.
type GetLoadedNodesReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
PackageID *cdssdk.PackageID `json:"packageID" binding:"required"`
}
+// GetLoadedNodesResp holds the result of fetching a package's loaded nodes.
type GetLoadedNodesResp struct {
NodeIDs []cdssdk.NodeID `json:"nodeIDs"`
}
+// GetLoadedNodes handles the HTTP request for fetching a package's loaded nodes.
func (s *PackageService) GetLoadedNodes(ctx *gin.Context) {
log := logger.WithField("HTTP", "Package.GetLoadedNodes")
@@ -157,6 +173,7 @@ func (s *PackageService) GetLoadedNodes(ctx *gin.Context) {
}))
}
+// mapMultiPartFileToUploadingObject converts multipart files into an uploading-object iterator.
func mapMultiPartFileToUploadingObject(files []*multipart.FileHeader) stgiter.UploadingObjectIterator {
return iterator.Map[*multipart.FileHeader](
iterator.Array(files...),
diff --git a/client/internal/http/server.go b/client/internal/http/server.go
index 462f76a..a748c70 100644
--- a/client/internal/http/server.go
+++ b/client/internal/http/server.go
@@ -7,12 +7,17 @@ import (
"gitlink.org.cn/cloudream/storage/client/internal/services"
)
+// Server holds the basic configuration and operations of the HTTP service
type Server struct {
- engine *gin.Engine
- listenAddr string
- svc *services.Service
+ engine *gin.Engine // gin HTTP engine
+ listenAddr string // address the service listens on
+ svc *services.Service // business-logic service instance
}
+// NewServer creates a new Server instance
+// listenAddr: address the service listens on
+// svc: business-logic service used to handle HTTP requests
+// Returns: the initialized Server and a possible error
func NewServer(listenAddr string, svc *services.Service) (*Server, error) {
engine := gin.New()
@@ -23,8 +28,10 @@ func NewServer(listenAddr string, svc *services.Service) (*Server, error) {
}, nil
}
+// Serve starts the HTTP service and listens for requests
+// Returns: an error if serving stops unexpectedly
func (s *Server) Serve() error {
- s.initRouters()
+ s.initRouters() // initialize the routes
logger.Infof("start serving http at: %s", s.listenAddr)
err := s.engine.Run(s.listenAddr)
@@ -38,23 +45,32 @@ func (s *Server) Serve() error {
return nil
}
+// initRouters registers all HTTP routes.
+//
+// It wires up every request handler of the HTTP API,
+// covering objects, packages, storage, cache and buckets.
func (s *Server) initRouters() {
- s.engine.GET(cdssdk.ObjectDownloadPath, s.Object().Download)
- s.engine.POST(cdssdk.ObjectUploadPath, s.Object().Upload)
- s.engine.GET(cdssdk.ObjectGetPackageObjectsPath, s.Object().GetPackageObjects)
+ // Object routes
+ s.engine.GET(cdssdk.ObjectDownloadPath, s.Object().Download) // download an object
+ s.engine.POST(cdssdk.ObjectUploadPath, s.Object().Upload) // upload objects
+ s.engine.GET(cdssdk.ObjectGetPackageObjectsPath, s.Object().GetPackageObjects) // list the objects in a package
- s.engine.GET(cdssdk.PackageGetPath, s.Package().Get)
- s.engine.POST(cdssdk.PackageCreatePath, s.Package().Create)
- s.engine.POST("/package/delete", s.Package().Delete)
- s.engine.GET("/package/getCachedNodes", s.Package().GetCachedNodes)
- s.engine.GET("/package/getLoadedNodes", s.Package().GetLoadedNodes)
+ // Package routes
+ s.engine.GET(cdssdk.PackageGetPath, s.Package().Get) // get package info
+ s.engine.POST(cdssdk.PackageCreatePath, s.Package().Create) // create a package
+ s.engine.POST("/package/delete", s.Package().Delete) // delete a package
+ s.engine.GET("/package/getCachedNodes", s.Package().GetCachedNodes) // get cache nodes of a package
+ s.engine.GET("/package/getLoadedNodes", s.Package().GetLoadedNodes) // get loaded nodes of a package
- s.engine.POST("/storage/loadPackage", s.Storage().LoadPackage)
- s.engine.POST("/storage/createPackage", s.Storage().CreatePackage)
- s.engine.GET("/storage/getInfo", s.Storage().GetInfo)
+ // Storage routes
+ s.engine.POST("/storage/loadPackage", s.Storage().LoadPackage) // load a package onto storage
+ s.engine.POST("/storage/createPackage", s.Storage().CreatePackage) // create a package from storage
+ s.engine.GET("/storage/getInfo", s.Storage().GetInfo) // get storage info
- s.engine.POST(cdssdk.CacheMovePackagePath, s.Cache().MovePackage)
+ // Cache routes
+ s.engine.POST(cdssdk.CacheMovePackagePath, s.Cache().MovePackage) // move a package into a node's cache
- s.engine.POST(cdssdk.BucketCreatePath, s.Bucket().Create)
- s.engine.POST(cdssdk.BucketDeletePath, s.Bucket().Delete)
+ // Bucket routes
+ s.engine.POST(cdssdk.BucketCreatePath, s.Bucket().Create) // create a bucket
+ s.engine.POST(cdssdk.BucketDeletePath, s.Bucket().Delete) // delete a bucket
}
diff --git a/client/internal/http/storage.go b/client/internal/http/storage.go
index 3ada149..3a017f1 100644
--- a/client/internal/http/storage.go
+++ b/client/internal/http/storage.go
@@ -10,26 +10,31 @@ import (
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
)
+// StorageService handles storage-related HTTP requests
type StorageService struct {
*Server
}
+// Storage returns a StorageService instance
func (s *Server) Storage() *StorageService {
return &StorageService{
Server: s,
}
}
+// StorageLoadPackageReq defines the request parameters for loading a package onto storage
type StorageLoadPackageReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
PackageID *cdssdk.PackageID `json:"packageID" binding:"required"`
StorageID *cdssdk.StorageID `json:"storageID" binding:"required"`
}
+// StorageLoadPackageResp defines the response of a load-package request
type StorageLoadPackageResp struct {
cdssdk.StorageLoadPackageResp
}
+// LoadPackage loads a package onto storage
func (s *StorageService) LoadPackage(ctx *gin.Context) {
log := logger.WithField("HTTP", "Storage.LoadPackage")
@@ -72,6 +77,7 @@ func (s *StorageService) LoadPackage(ctx *gin.Context) {
}
}
+// StorageCreatePackageReq defines the request parameters for creating a package from storage
type StorageCreatePackageReq struct {
UserID *cdssdk.UserID `json:"userID" binding:"required"`
StorageID *cdssdk.StorageID `json:"storageID" binding:"required"`
@@ -81,10 +87,12 @@ type StorageCreatePackageReq struct {
NodeAffinity *cdssdk.NodeID `json:"nodeAffinity"`
}
+// StorageCreatePackageResp defines the response of a create-package request
type StorageCreatePackageResp struct {
PackageID cdssdk.PackageID `json:"packageID"`
}
+// CreatePackage creates a package from files on storage
func (s *StorageService) CreatePackage(ctx *gin.Context) {
log := logger.WithField("HTTP", "Storage.CreatePackage")
@@ -126,15 +134,18 @@ func (s *StorageService) CreatePackage(ctx *gin.Context) {
}
}
+// StorageGetInfoReq defines the request parameters for fetching storage information
type StorageGetInfoReq struct {
UserID *cdssdk.UserID `form:"userID" binding:"required"`
StorageID *cdssdk.StorageID `form:"storageID" binding:"required"`
}
+// StorageGetInfoResp defines the response of a get-storage-info request
type StorageGetInfoResp struct {
cdssdk.StorageGetInfoResp
}
+// GetInfo returns storage information
func (s *StorageService) GetInfo(ctx *gin.Context) {
log := logger.WithField("HTTP", "Storage.GetInfo")
diff --git a/client/internal/services/agent.go b/client/internal/services/agent.go
index 1c7b4b4..a629682 100644
--- a/client/internal/services/agent.go
+++ b/client/internal/services/agent.go
@@ -1,9 +1,20 @@
+// Package services provides functionality related to the agent service.
package services
type AgentService struct {
- *Service
+ *Service // Service is the embedded base service that gives AgentService its core functionality.
}
+// AgentSvc returns an AgentService instance.
+// It allows agent-related functionality to be accessed through a Service instance.
+//
+// Parameters:
+//
+// svc *Service - pointer to the current Service instance.
+//
+// Returns:
+//
+// *AgentService - pointer to the newly created AgentService instance.
func (svc *Service) AgentSvc() *AgentService {
return &AgentService{Service: svc}
}
diff --git a/client/internal/services/bucket.go b/client/internal/services/bucket.go
index 4e297ce..67293ba 100644
--- a/client/internal/services/bucket.go
+++ b/client/internal/services/bucket.go
@@ -9,26 +9,37 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// BucketService provides operations on buckets
type BucketService struct {
*Service
}
+// BucketSvc creates and returns a BucketService instance
func (svc *Service) BucketSvc() *BucketService {
return &BucketService{Service: svc}
}
+// GetBucket returns bucket information by user ID and bucket ID
+// userID: unique identifier of the user
+// bucketID: unique identifier of the bucket
+// Returns: the bucket information and a possible error
func (svc *BucketService) GetBucket(userID cdssdk.UserID, bucketID cdssdk.BucketID) (model.Bucket, error) {
- // TODO
+ // TODO: this function is not implemented yet
panic("not implement yet")
}
+// GetUserBuckets returns all buckets of the given user
+// userID: unique identifier of the user
+// Returns: the user's bucket list and a possible error
func (svc *BucketService) GetUserBuckets(userID cdssdk.UserID) ([]model.Bucket, error) {
+ // Acquire a coordinator client from the CoordinatorMQPool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // make sure the client is released
+ // Ask the coordinator for the user's buckets
resp, err := coorCli.GetUserBuckets(coormq.NewGetUserBuckets(userID))
if err != nil {
return nil, fmt.Errorf("get user buckets failed, err: %w", err)
@@ -37,13 +48,19 @@ func (svc *BucketService) GetUserBuckets(userID cdssdk.UserID) ([]model.Bucket,
return resp.Buckets, nil
}
+// GetBucketPackages returns all packages in the given user's bucket
+// userID: unique identifier of the user
+// bucketID: unique identifier of the bucket
+// Returns: the bucket's package list and a possible error
func (svc *BucketService) GetBucketPackages(userID cdssdk.UserID, bucketID cdssdk.BucketID) ([]model.Package, error) {
+ // Acquire a coordinator client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // make sure the client is released
+ // Ask the coordinator for the packages in the bucket
resp, err := coorCli.GetBucketPackages(coormq.NewGetBucketPackages(userID, bucketID))
if err != nil {
return nil, fmt.Errorf("get bucket packages failed, err: %w", err)
@@ -52,13 +69,19 @@ func (svc *BucketService) GetBucketPackages(userID cdssdk.UserID, bucketID cdssd
return resp.Packages, nil
}
+// CreateBucket creates a new bucket
+// userID: unique identifier of the user
+// bucketName: name of the bucket
+// Returns: the ID of the newly created bucket and a possible error
func (svc *BucketService) CreateBucket(userID cdssdk.UserID, bucketName string) (cdssdk.BucketID, error) {
+ // Acquire a coordinator client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return 0, fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // make sure the client is released
+ // Ask the coordinator to create the bucket
resp, err := coorCli.CreateBucket(coormq.NewCreateBucket(userID, bucketName))
if err != nil {
return 0, fmt.Errorf("creating bucket: %w", err)
@@ -67,14 +90,19 @@ func (svc *BucketService) CreateBucket(userID cdssdk.UserID, bucketName string)
return resp.BucketID, nil
}
+// DeleteBucket deletes the specified bucket
+// userID: unique identifier of the user
+// bucketID: unique identifier of the bucket
+// Returns: a possible error
func (svc *BucketService) DeleteBucket(userID cdssdk.UserID, bucketID cdssdk.BucketID) error {
+ // Acquire a coordinator client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // make sure the client is released
- // TODO 检查用户是否有删除这个Bucket的权限。检查的时候可以只上UserBucket的Read锁
+ // TODO: check whether the user has permission to delete this bucket; a Read lock on UserBucket is enough for the check
_, err = coorCli.DeleteBucket(coormq.NewDeleteBucket(userID, bucketID))
if err != nil {
diff --git a/client/internal/services/cache.go b/client/internal/services/cache.go
index a32e456..f35098b 100644
--- a/client/internal/services/cache.go
+++ b/client/internal/services/cache.go
@@ -11,21 +11,30 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// CacheService is the cache service; it embeds Service.
type CacheService struct {
*Service
}
+// CacheSvc creates and returns a CacheService instance.
func (svc *Service) CacheSvc() *CacheService {
return &CacheService{Service: svc}
}
+// StartCacheMovePackage starts moving a package into a node's cache.
+// userID: user identifier;
+// packageID: package identifier;
+// nodeID: node identifier;
+// Returns the task ID and a possible error.
func (svc *CacheService) StartCacheMovePackage(userID cdssdk.UserID, packageID cdssdk.PackageID, nodeID cdssdk.NodeID) (string, error) {
+ // Acquire an agent MQ client
agentCli, err := stgglb.AgentMQPool.Acquire(nodeID)
if err != nil {
return "", fmt.Errorf("new agent client: %w", err)
}
defer stgglb.AgentMQPool.Release(agentCli)
+ // Ask the agent to start the cache-move task
startResp, err := agentCli.StartCacheMovePackage(agtmq.NewStartCacheMovePackage(userID, packageID))
if err != nil {
return "", fmt.Errorf("start cache move package: %w", err)
@@ -34,13 +43,20 @@ func (svc *CacheService) StartCacheMovePackage(userID cdssdk.UserID, packageID c
return startResp.TaskID, nil
}
+// WaitCacheMovePackage waits for a cache-move task to finish.
+// nodeID: node identifier;
+// taskID: task identifier;
+// waitTimeout: how long to wait;
+// Returns whether the task has finished and a possible error.
func (svc *CacheService) WaitCacheMovePackage(nodeID cdssdk.NodeID, taskID string, waitTimeout time.Duration) (bool, error) {
+ // Acquire an agent MQ client
agentCli, err := stgglb.AgentMQPool.Acquire(nodeID)
if err != nil {
return true, fmt.Errorf("new agent client: %w", err)
}
defer stgglb.AgentMQPool.Release(agentCli)
+ // Query the agent for the cache-move status
waitResp, err := agentCli.WaitCacheMovePackage(agtmq.NewWaitCacheMovePackage(taskID, waitTimeout.Milliseconds()))
if err != nil {
return true, fmt.Errorf("wait cache move package: %w", err)
@@ -57,13 +73,19 @@ func (svc *CacheService) WaitCacheMovePackage(nodeID cdssdk.NodeID, taskID strin
return true, nil
}
+// CacheRemovePackage asks the coordinator to remove a cached package.
+// packageID: package identifier;
+// nodeID: node identifier;
+// Returns a possible error.
func (svc *CacheService) CacheRemovePackage(packageID cdssdk.PackageID, nodeID cdssdk.NodeID) error {
+ // Acquire a coordinator MQ client
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new agent client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Send the cache-remove request to the coordinator
_, err = coorCli.CacheRemovePackage(coormq.ReqCacheRemoveMovedPackage(packageID, nodeID))
if err != nil {
return fmt.Errorf("requesting to coordinator: %w", err)
diff --git a/client/internal/services/node.go b/client/internal/services/node.go
index a4c0c7f..272c885 100644
--- a/client/internal/services/node.go
+++ b/client/internal/services/node.go
@@ -8,25 +8,40 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// NodeService provides node-related operations
type NodeService struct {
*Service
}
+// NodeSvc creates and returns a NodeService instance
func (svc *Service) NodeSvc() *NodeService {
return &NodeService{Service: svc}
}
+// GetNodes returns the node information for the given node IDs
+// Parameters:
+//
+// nodeIDs []cdssdk.NodeID - list of node IDs to query
+//
+// Returns:
+//
+// []cdssdk.Node - the nodes that were found
+// error - an error if something went wrong
func (svc *NodeService) GetNodes(nodeIDs []cdssdk.NodeID) ([]cdssdk.Node, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
+ // Make sure the client is returned to the pool when the function ends
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator for the node information
getResp, err := coorCli.GetNodes(coormq.NewGetNodes(nodeIDs))
if err != nil {
- return nil, fmt.Errorf("requsting to coodinator: %w", err)
+ return nil, fmt.Errorf("requesting to coordinator: %w", err)
}
+ // Return the nodes
return getResp.Nodes, nil
}
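Nearly every service method repeats the acquire / defer-release / request pattern seen in GetNodes. A self-contained sketch of that pattern with a toy pool; the pool type and its methods are invented for the example and merely stand in for the real MQ pools in the storage globals package:

package main

import (
	"errors"
	"fmt"
)

// client and pool are toy stand-ins for the real MQ client pools.
type client struct{ id int }

type pool struct{ free []*client }

func (p *pool) Acquire() (*client, error) {
	if len(p.free) == 0 {
		return nil, errors.New("pool exhausted")
	}
	c := p.free[len(p.free)-1]
	p.free = p.free[:len(p.free)-1]
	return c, nil
}

func (p *pool) Release(c *client) { p.free = append(p.free, c) }

func getNodes(p *pool, nodeIDs []int) ([]string, error) {
	cli, err := p.Acquire()
	if err != nil {
		return nil, fmt.Errorf("new coordinator client: %w", err)
	}
	defer p.Release(cli) // always hand the client back, even on error paths

	// Stand-in for the real request to the coordinator.
	nodes := make([]string, 0, len(nodeIDs))
	for _, id := range nodeIDs {
		nodes = append(nodes, fmt.Sprintf("node-%d", id))
	}
	return nodes, nil
}

func main() {
	p := &pool{free: []*client{{id: 1}}}
	fmt.Println(getNodes(p, []int{1, 2, 3}))
}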
diff --git a/client/internal/services/object.go b/client/internal/services/object.go
index 0dc7374..b52d4ab 100644
--- a/client/internal/services/object.go
+++ b/client/internal/services/object.go
@@ -13,19 +13,31 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// ObjectService manages object upload, download and related operations.
type ObjectService struct {
*Service
}
+// ObjectSvc returns an ObjectService instance.
func (svc *Service) ObjectSvc() *ObjectService {
return &ObjectService{Service: svc}
}
+// StartUploading starts uploading objects.
+// userID: user ID.
+// packageID: package ID.
+// objIter: iterator over the objects being uploaded.
+// nodeAffinity: preferred node for the upload.
+// Returns: the task ID and a possible error.
func (svc *ObjectService) StartUploading(userID cdssdk.UserID, packageID cdssdk.PackageID, objIter iterator.UploadingObjectIterator, nodeAffinity *cdssdk.NodeID) (string, error) {
tsk := svc.TaskMgr.StartNew(mytask.NewUploadObjects(userID, packageID, objIter, nodeAffinity))
return tsk.ID(), nil
}
+// WaitUploading waits for an upload task to finish.
+// taskID: task ID.
+// waitTimeout: how long to wait.
+// Returns: whether the task has finished, the upload result, and a possible error.
func (svc *ObjectService) WaitUploading(taskID string, waitTimeout time.Duration) (bool, *mytask.UploadObjectsResult, error) {
tsk := svc.TaskMgr.FindByID(taskID)
if tsk.WaitTimeout(waitTimeout) {
@@ -35,20 +47,28 @@ func (svc *ObjectService) WaitUploading(taskID string, waitTimeout time.Duration
return false, nil, nil
}
+// Download downloads an object. Not implemented yet.
+// userID: user ID.
+// objectID: object ID.
+// Returns: a read closer and a possible error.
func (svc *ObjectService) Download(userID cdssdk.UserID, objectID cdssdk.ObjectID) (io.ReadCloser, error) {
panic("not implement yet!")
}
+// GetPackageObjects returns the objects in a package.
+// userID: user ID.
+// packageID: package ID.
+// Returns: the object list and a possible error.
func (svc *ObjectService) GetPackageObjects(userID cdssdk.UserID, packageID cdssdk.PackageID) ([]model.Object, error) {
- coorCli, err := stgglb.CoordinatorMQPool.Acquire()
+ coorCli, err := stgglb.CoordinatorMQPool.Acquire() // acquire a coordinator client
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // release the coordinator client
- getResp, err := coorCli.GetPackageObjects(coormq.NewGetPackageObjects(userID, packageID))
+ getResp, err := coorCli.GetPackageObjects(coormq.NewGetPackageObjects(userID, packageID)) // ask the coordinator for the package's objects
if err != nil {
- return nil, fmt.Errorf("requsting to coodinator: %w", err)
+ return nil, fmt.Errorf("requesting to coordinator: %w", err)
}
return getResp.Objects, nil
diff --git a/client/internal/services/package.go b/client/internal/services/package.go
index 9667356..518486e 100644
--- a/client/internal/services/package.go
+++ b/client/internal/services/package.go
@@ -11,21 +11,26 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// PackageService provides package-related operations
type PackageService struct {
*Service
}
+// PackageSvc creates and returns a PackageService instance
func (svc *Service) PackageSvc() *PackageService {
return &PackageService{Service: svc}
}
+// Get returns the specified package of the given user
func (svc *PackageService) Get(userID cdssdk.UserID, packageID cdssdk.PackageID) (*model.Package, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator for the package information
getResp, err := coorCli.GetPackage(coormq.NewGetPackage(userID, packageID))
if err != nil {
return nil, fmt.Errorf("requsting to coodinator: %w", err)
@@ -34,13 +39,16 @@ func (svc *PackageService) Get(userID cdssdk.UserID, packageID cdssdk.PackageID)
return &getResp.Package, nil
}
+// Create creates a new package
func (svc *PackageService) Create(userID cdssdk.UserID, bucketID cdssdk.BucketID, name string) (cdssdk.PackageID, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return 0, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator to create the package
resp, err := coorCli.CreatePackage(coormq.NewCreatePackage(userID, bucketID, name))
if err != nil {
return 0, fmt.Errorf("creating package: %w", err)
@@ -49,18 +57,22 @@ func (svc *PackageService) Create(userID cdssdk.UserID, bucketID cdssdk.BucketID
return resp.PackageID, nil
}
+// DownloadPackage downloads the contents of the specified package
func (svc *PackageService) DownloadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID) (iterator.DownloadingObjectIterator, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator for the details of the package's objects
getObjsResp, err := coorCli.GetPackageObjectDetails(coormq.NewGetPackageObjectDetails(packageID))
if err != nil {
return nil, fmt.Errorf("getting package object details: %w", err)
}
+ // Create the downloading-object iterator
iter := iterator.NewDownloadObjectIterator(getObjsResp.Objects, &iterator.DownloadContext{
Distlock: svc.DistLock,
})
@@ -68,13 +80,16 @@ func (svc *PackageService) DownloadPackage(userID cdssdk.UserID, packageID cdssd
return iter, nil
}
+// DeletePackage deletes the specified package
func (svc *PackageService) DeletePackage(userID cdssdk.UserID, packageID cdssdk.PackageID) error {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator to delete the package
_, err = coorCli.DeletePackage(coormq.NewDeletePackage(userID, packageID))
if err != nil {
return fmt.Errorf("deleting package: %w", err)
@@ -83,18 +98,22 @@ func (svc *PackageService) DeletePackage(userID cdssdk.UserID, packageID cdssdk.
return nil
}
+// GetCachedNodes returns the cache-node information of the specified package
func (svc *PackageService) GetCachedNodes(userID cdssdk.UserID, packageID cdssdk.PackageID) (cdssdk.PackageCachingInfo, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return cdssdk.PackageCachingInfo{}, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator for the package's cache-node information
resp, err := coorCli.GetPackageCachedNodes(coormq.NewGetPackageCachedNodes(userID, packageID))
if err != nil {
return cdssdk.PackageCachingInfo{}, fmt.Errorf("get package cached nodes: %w", err)
}
+ // Build and return the caching info
tmp := cdssdk.PackageCachingInfo{
NodeInfos: resp.NodeInfos,
PackageSize: resp.PackageSize,
@@ -102,13 +121,16 @@ func (svc *PackageService) GetCachedNodes(userID cdssdk.UserID, packageID cdssdk
return tmp, nil
}
+// GetLoadedNodes returns the IDs of the nodes the package has been loaded onto
func (svc *PackageService) GetLoadedNodes(userID cdssdk.UserID, packageID cdssdk.PackageID) ([]cdssdk.NodeID, error) {
+ // Acquire a client from the coordinator MQ pool
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // Ask the coordinator for the IDs of the nodes that loaded the package
resp, err := coorCli.GetPackageLoadedNodes(coormq.NewGetPackageLoadedNodes(userID, packageID))
if err != nil {
return nil, fmt.Errorf("get package loaded nodes: %w", err)
diff --git a/client/internal/services/scanner.go b/client/internal/services/scanner.go
index fa50ba5..0186e7d 100644
--- a/client/internal/services/scanner.go
+++ b/client/internal/services/scanner.go
@@ -8,21 +8,31 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// ScannerService wraps the scanner-related service functionality.
type ScannerService struct {
*Service
}
+// ScannerSvc returns a ScannerService instance.
func (svc *Service) ScannerSvc() *ScannerService {
return &ScannerService{Service: svc}
}
+// PostEvent posts a data-inspection event to the scanner.
+// event: the event to send.
+// isEmergency: whether the event is urgent, which affects its processing priority.
+// dontMerge: whether the event must not be merged with other events.
+// Returns: an error if sending the event fails.
func (svc *ScannerService) PostEvent(event scevt.Event, isEmergency bool, dontMerge bool) error {
+ // Acquire a client from the scanner MQ pool
scCli, err := stgglb.ScannerMQPool.Acquire()
if err != nil {
- return fmt.Errorf("new scacnner client: %w", err)
+ return fmt.Errorf("new scanner client: %w", err)
}
+ // Make sure the scanner client is released before the function returns
defer stgglb.ScannerMQPool.Release(scCli)
+ // Send the event to the scanner
err = scCli.PostEvent(scmq.NewPostEvent(event, isEmergency, dontMerge))
if err != nil {
return fmt.Errorf("request to scanner failed, err: %w", err)
diff --git a/client/internal/services/service.go b/client/internal/services/service.go
index 226a616..cf7770a 100644
--- a/client/internal/services/service.go
+++ b/client/internal/services/service.go
@@ -1,15 +1,29 @@
+// Package services provides the service layer that ties the distributed lock service and the task manager together.
+
package services
import (
- "gitlink.org.cn/cloudream/common/pkgs/distlock"
- "gitlink.org.cn/cloudream/storage/client/internal/task"
+ "gitlink.org.cn/cloudream/common/pkgs/distlock" // 导入分布锁服务包
+ "gitlink.org.cn/cloudream/storage/client/internal/task" // 导入任务管理服务包
)
+// Service bundles the distributed lock service and the task manager.
type Service struct {
- DistLock *distlock.Service
- TaskMgr *task.Manager
+ DistLock *distlock.Service // lock service for distributed environments
+ TaskMgr *task.Manager // creates, manages and runs tasks
}
+// NewService creates a new Service instance.
+//
+// Parameters:
+//
+// distlock *distlock.Service: the distributed lock service instance.
+// taskMgr *task.Manager: the task manager instance.
+//
+// Returns:
+//
+// *Service: the initialized Service instance.
+// error: an error if creation fails, otherwise nil.
func NewService(distlock *distlock.Service, taskMgr *task.Manager) (*Service, error) {
return &Service{
DistLock: distlock,
diff --git a/client/internal/services/storage.go b/client/internal/services/storage.go
index d5671a4..57d71f0 100644
--- a/client/internal/services/storage.go
+++ b/client/internal/services/storage.go
@@ -2,42 +2,55 @@ package services
import (
"fmt"
+ "gitlink.org.cn/cloudream/storage/common/pkgs/db/model"
"time"
cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
stgglb "gitlink.org.cn/cloudream/storage/common/globals"
- "gitlink.org.cn/cloudream/storage/common/pkgs/db/model"
agtmq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/agent"
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// StorageService embeds Service and provides storage-related operations.
type StorageService struct {
*Service
}
+// StorageSvc returns a StorageService instance.
func (svc *Service) StorageSvc() *StorageService {
return &StorageService{Service: svc}
}
+// StartStorageLoadPackage starts a task that loads a package onto a storage.
+// userID: ID of the requesting user.
+// packageID: ID of the package to load.
+// storageID: ID of the storage the package is loaded onto.
+// Return value 1: ID of the node that performs the storage operation.
+// Return value 2: ID of the load task.
+// Return value 3: any error encountered during execution.
func (svc *StorageService) StartStorageLoadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID, storageID cdssdk.StorageID) (cdssdk.NodeID, string, error) {
+ // 获取协调器MQ客户端
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return 0, "", fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
+ // 从协调器获取存储信息
stgResp, err := coorCli.GetStorageInfo(coormq.NewGetStorageInfo(userID, storageID))
if err != nil {
return 0, "", fmt.Errorf("getting storage info: %w", err)
}
+ // 获取代理MQ客户端
agentCli, err := stgglb.AgentMQPool.Acquire(stgResp.NodeID)
if err != nil {
return 0, "", fmt.Errorf("new agent client: %w", err)
}
defer stgglb.AgentMQPool.Release(agentCli)
+ // 向代理发送开始加载存储包的请求
startResp, err := agentCli.StartStorageLoadPackage(agtmq.NewStartStorageLoadPackage(userID, packageID, storageID))
if err != nil {
return 0, "", fmt.Errorf("start storage load package: %w", err)
@@ -46,6 +59,17 @@ func (svc *StorageService) StartStorageLoadPackage(userID cdssdk.UserID, package
return stgResp.NodeID, startResp.TaskID, nil
}
+/*
+WaitStorageLoadPackage waits for a storage load-package task to finish.
+Parameters:
+- nodeID: node ID
+- taskID: task ID
+- waitTimeout: how long to wait before giving up
+Return values:
+- bool: whether the task has completed
+- string: full path of the loaded package on the storage
+- error: any error encountered
+*/
func (svc *StorageService) WaitStorageLoadPackage(nodeID cdssdk.NodeID, taskID string, waitTimeout time.Duration) (bool, string, error) {
agentCli, err := stgglb.AgentMQPool.Acquire(nodeID)
if err != nil {
@@ -71,12 +95,26 @@ func (svc *StorageService) WaitStorageLoadPackage(nodeID cdssdk.NodeID, taskID s
return true, waitResp.FullPath, nil
}
+// DeleteStoragePackage deletes a package from a storage. Not implemented yet.
func (svc *StorageService) DeleteStoragePackage(userID int64, packageID int64, storageID int64) error {
// TODO
panic("not implement yet")
}
-// 请求节点启动从Storage中上传文件的任务。会返回节点ID和任务ID
+/*
+StartStorageCreatePackage asks a node to start a task that uploads files from a Storage.
+Parameters:
+- userID: user ID
+- bucketID: bucket ID
+- name: name of the package to create
+- storageID: storage ID
+- path: path in the storage to upload from
+- nodeAffinity: node affinity (optional)
+Return values:
+- cdssdk.NodeID: node ID
+- string: task ID
+- error: any error encountered
+*/
func (svc *StorageService) StartStorageCreatePackage(userID cdssdk.UserID, bucketID cdssdk.BucketID, name string, storageID cdssdk.StorageID, path string, nodeAffinity *cdssdk.NodeID) (cdssdk.NodeID, string, error) {
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
@@ -103,6 +141,17 @@ func (svc *StorageService) StartStorageCreatePackage(userID cdssdk.UserID, bucke
return stgResp.NodeID, startResp.TaskID, nil
}
+/*
+WaitStorageCreatePackage waits for a storage create-package task to finish.
+Parameters:
+- nodeID: node ID
+- taskID: task ID
+- waitTimeout: how long to wait before giving up
+Return values:
+- bool: whether the task has completed
+- cdssdk.PackageID: ID of the created package
+- error: any error encountered
+*/
func (svc *StorageService) WaitStorageCreatePackage(nodeID cdssdk.NodeID, taskID string, waitTimeout time.Duration) (bool, cdssdk.PackageID, error) {
agentCli, err := stgglb.AgentMQPool.Acquire(nodeID)
if err != nil {
@@ -128,6 +177,14 @@ func (svc *StorageService) WaitStorageCreatePackage(nodeID cdssdk.NodeID, taskID
return true, waitResp.PackageID, nil
}
+/*
+GetInfo gets the information of a storage.
+Parameters:
+- userID: user ID
+- storageID: storage ID
+Return values:
+- *model.Storage: the storage information
+- error: any error encountered
+*/
func (svc *StorageService) GetInfo(userID cdssdk.UserID, storageID cdssdk.StorageID) (*model.Storage, error) {
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
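For reference, a caller-side sketch of the start-then-wait flow for loading a package onto a storage. loadAndWait is a hypothetical helper, imports of the project packages are assumed, and only the StartStorageLoadPackage / WaitStorageLoadPackage signatures shown above are used:

	// loadAndWait starts the load task and polls until the agent reports completion.
	func loadAndWait(svc *services.Service, userID cdssdk.UserID, packageID cdssdk.PackageID, storageID cdssdk.StorageID) (string, error) {
		nodeID, taskID, err := svc.StorageSvc().StartStorageLoadPackage(userID, packageID, storageID)
		if err != nil {
			return "", err
		}
		for {
			ok, fullPath, err := svc.StorageSvc().WaitStorageLoadPackage(nodeID, taskID, 10*time.Second)
			if err != nil {
				return "", err
			}
			if ok {
				// fullPath is the location of the loaded package on the storage
				return fullPath, nil
			}
			// not finished within this wait window: poll again
		}
	}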
diff --git a/client/internal/task/task.go b/client/internal/task/task.go
index a1ec9f6..7d25c40 100644
--- a/client/internal/task/task.go
+++ b/client/internal/task/task.go
@@ -1,28 +1,34 @@
package task
import (
- "gitlink.org.cn/cloudream/common/pkgs/distlock"
- "gitlink.org.cn/cloudream/common/pkgs/task"
- "gitlink.org.cn/cloudream/storage/common/pkgs/connectivity"
+ "gitlink.org.cn/cloudream/common/pkgs/distlock" // 引入分布式锁服务
+ "gitlink.org.cn/cloudream/common/pkgs/task" // 引入任务处理相关的包
+ "gitlink.org.cn/cloudream/storage/common/pkgs/connectivity" // 引入网络连接状态收集器
)
+// TaskContext 定义了任务执行的上下文环境,包含分布式锁服务和网络连接状态收集器
type TaskContext struct {
distlock *distlock.Service
connectivity *connectivity.Collector
}
-// 需要在Task结束后主动调用,completing函数将在Manager加锁期间被调用,
-// 因此适合进行执行结果的设置
+// CompleteFn 类型定义了任务完成时的回调函数,用于设置任务的执行结果
type CompleteFn = task.CompleteFn
+// Manager 类型定义了任务管理器,用于创建、管理和调度任务
type Manager = task.Manager[TaskContext]
+// TaskBody 类型定义了任务的主体部分,包含了任务实际执行的逻辑
type TaskBody = task.TaskBody[TaskContext]
+// Task 类型定义了具体的任务,包括任务的上下文、主体和完成选项
type Task = task.Task[TaskContext]
+// CompleteOption 类型定义了任务完成时的选项,可用于定制任务完成的处理方式
type CompleteOption = task.CompleteOption
+// NewManager creates a new task manager from a distributed lock service and a connectivity collector.
+// It returns the initialized manager instance.
func NewManager(distlock *distlock.Service, connectivity *connectivity.Collector) Manager {
return task.NewManager(TaskContext{
distlock: distlock,
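To illustrate the type-aliasing pattern above, a minimal self-contained sketch with a simplified stand-in for the generic task framework (the real framework lives in gitlink.org.cn/cloudream/common/pkgs/task and has a richer API):

	package main

	import "fmt"

	// Simplified stand-ins for the generic task types (hypothetical, for illustration only).
	type Body[TCtx any] interface {
		Execute(ctx TCtx)
	}

	type Manager[TCtx any] struct{ ctx TCtx }

	func NewManager[TCtx any](ctx TCtx) Manager[TCtx] { return Manager[TCtx]{ctx: ctx} }

	func (m *Manager[TCtx]) Start(b Body[TCtx]) { b.Execute(m.ctx) }

	// The pattern used in task.go: fix the context type once and alias the generic types.
	type MyContext struct{ name string }

	type MyManager = Manager[MyContext]

	type printTask struct{}

	func (printTask) Execute(ctx MyContext) { fmt.Println("running with context:", ctx.name) }

	func main() {
		mgr := NewManager(MyContext{name: "client"})
		mgr.Start(printTask{})
	}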
diff --git a/client/internal/task/upload_objects.go b/client/internal/task/upload_objects.go
index ca0f8a6..2f8fae6 100644
--- a/client/internal/task/upload_objects.go
+++ b/client/internal/task/upload_objects.go
@@ -1,37 +1,51 @@
+// Package task defines the structures and functions related to task handling.
package task
import (
"time"
- "gitlink.org.cn/cloudream/common/pkgs/task"
- cdssdk "gitlink.org.cn/cloudream/common/sdks/storage"
- "gitlink.org.cn/cloudream/storage/common/pkgs/cmd"
- "gitlink.org.cn/cloudream/storage/common/pkgs/iterator"
+ "gitlink.org.cn/cloudream/common/pkgs/task" // 引入task包,提供任务处理的通用功能。
+ cdssdk "gitlink.org.cn/cloudream/common/sdks/storage" // 引入cdssdk包,提供云存储相关的SDK接口。
+ "gitlink.org.cn/cloudream/storage/common/pkgs/cmd" // 引入cmd包,提供命令执行相关的功能。
+ "gitlink.org.cn/cloudream/storage/common/pkgs/iterator" // 引入iterator包,提供迭代器相关的功能。
)
+// UploadObjectsResult 定义了上传对象结果的类型,继承自cmd包的UploadObjectsResult类型。
type UploadObjectsResult = cmd.UploadObjectsResult
+// UploadObjects 定义了上传对象的任务结构体,包含上传命令和执行结果。
type UploadObjects struct {
- cmd cmd.UploadObjects
+ cmd cmd.UploadObjects // cmd字段定义了上传对象的具体操作。
- Result *UploadObjectsResult
+ Result *UploadObjectsResult // Result字段存储上传对象操作的结果。
}
+// NewUploadObjects creates and returns a new UploadObjects instance.
+// userID: ID of the user who starts the upload.
+// packageID: ID of the package the uploaded objects belong to.
+// objectIter: iterator over the objects to upload.
+// nodeAffinity: preferred node for executing the upload, if any.
+// The return value is the initialized *UploadObjects.
func NewUploadObjects(userID cdssdk.UserID, packageID cdssdk.PackageID, objectIter iterator.UploadingObjectIterator, nodeAffinity *cdssdk.NodeID) *UploadObjects {
return &UploadObjects{
cmd: *cmd.NewUploadObjects(userID, packageID, objectIter, nodeAffinity),
}
}
+// Execute runs the upload-objects task.
+// task: the task instance carrying the task context.
+// ctx: the execution context, including the distributed lock service and connectivity info.
+// complete: the callback invoked when the task finishes.
+// It calls the upload command's Execute method, stores the result and reports completion through the callback.
func (t *UploadObjects) Execute(task *task.Task[TaskContext], ctx TaskContext, complete CompleteFn) {
ret, err := t.cmd.Execute(&cmd.UploadObjectsContext{
- Distlock: ctx.distlock,
- Connectivity: ctx.connectivity,
+ Distlock: ctx.distlock, // 使用任务上下文中的分布式锁。
+ Connectivity: ctx.connectivity, // 使用任务上下文中的网络连接性信息。
})
- t.Result = ret
+ t.Result = ret // 存储上传结果。
complete(err, CompleteOption{
- RemovingDelay: time.Minute,
+ RemovingDelay: time.Minute, // 设置任务完成后的清理延迟为1分钟。
})
}
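A minimal self-contained sketch of the complete-callback pattern used by Execute, with simplified stand-ins for CompleteFn and CompleteOption (not the real task package types):

	package main

	import (
		"fmt"
		"time"
	)

	// Simplified stand-ins, for illustration only.
	type CompleteOption struct{ RemovingDelay time.Duration }

	type CompleteFn func(err error, opt CompleteOption)

	// A task stores its result on itself and reports completion via the callback,
	// mirroring how UploadObjects.Execute sets t.Result and then calls complete().
	type demoTask struct{ Result string }

	func (t *demoTask) Execute(complete CompleteFn) {
		t.Result = "done"
		complete(nil, CompleteOption{RemovingDelay: time.Minute})
	}

	func main() {
		t := &demoTask{}
		t.Execute(func(err error, opt CompleteOption) {
			fmt.Printf("task finished, err=%v, keep record for %v, result=%q\n", err, opt.RemovingDelay, t.Result)
		})
	}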
diff --git a/client/main.go b/client/main.go
index 101b013..02cdeea 100644
--- a/client/main.go
+++ b/client/main.go
@@ -16,56 +16,87 @@ import (
"gitlink.org.cn/cloudream/storage/common/pkgs/distlock"
)
+/*
+This program is the client application. The entry function main():
+initializes the configuration and exits the process on failure;
+initializes the logging system and exits the process on failure;
+initializes global state: local machine info, the MQ pools and the agent RPC pool;
+initializes the IPFS client if an IPFS configuration is present;
+starts network connectivity detection and runs one in-place check;
+creates the distributed lock service and runs it in its own goroutine;
+creates the task manager;
+creates the service instance;
+creates the command line interface;
+dispatches the command-line arguments.
+The helper serveDistLock() runs the distributed lock service and handles the error if the service stops.
+*/
+
+// @Description: program entry point; initializes configuration, logging and global state, then starts connectivity detection, the distributed lock service, the task manager and the services that handle client requests.
func main() {
+ // 初始化配置,失败则结束进程
err := config.Init()
if err != nil {
fmt.Printf("init config failed, err: %s", err.Error())
os.Exit(1)
}
+ // 初始化日志系统
err = logger.Init(&config.Cfg().Logger)
if err != nil {
fmt.Printf("init logger failed, err: %s", err.Error())
os.Exit(1)
}
+ // 初始化全局变量
stgglb.InitLocal(&config.Cfg().Local)
stgglb.InitMQPool(&config.Cfg().RabbitMQ)
stgglb.InitAgentRPCPool(&config.Cfg().AgentGRPC)
+ // 如果IPFS配置非空,初始化IPFS客户端
if config.Cfg().IPFS != nil {
logger.Infof("IPFS config is not empty, so create a ipfs client")
-
stgglb.InitIPFSPool(config.Cfg().IPFS)
}
- // 启动网络连通性检测,并就地检测一次
+ // 启动网络连通性检测
conCol := connectivity.NewCollector(&config.Cfg().Connectivity, nil)
conCol.CollectInPlace()
+	// Create the distributed lock service
distlockSvc, err := distlock.NewService(&config.Cfg().DistLock)
if err != nil {
logger.Warnf("new distlock service failed, err: %s", err.Error())
os.Exit(1)
}
- go serveDistLock(distlockSvc)
+ go serveDistLock(distlockSvc) // 在goroutine中运行分布式锁服务
+ // 创建任务管理器
taskMgr := task.NewManager(distlockSvc, &conCol)
+ // 创建服务实例
svc, err := services.NewService(distlockSvc, &taskMgr)
if err != nil {
logger.Warnf("new services failed, err: %s", err.Error())
os.Exit(1)
}
+ // 创建命令行接口
cmds, err := cmdline.NewCommandline(svc)
if err != nil {
logger.Warnf("new command line failed, err: %s", err.Error())
os.Exit(1)
}
+ // 分发命令行指令
cmds.DispatchCommand(os.Args[1:])
}
+// serveDistLock runs the distributed lock service
+//
+// @Description: runs the distributed lock service (invoked from main in its own goroutine) and logs an error if the service stops.
func serveDistLock(svc *distlock.Service) {
logger.Info("start serving distlock")
diff --git a/common/globals/globals.go b/common/globals/globals.go
index 9bb2013..27f236c 100644
--- a/common/globals/globals.go
+++ b/common/globals/globals.go
@@ -6,6 +6,10 @@ import (
var Local *stgmodels.LocalMachineInfo
+// InitLocal
+//
+// @Description: initializes the local machine info
+// @param info
func InitLocal(info *stgmodels.LocalMachineInfo) {
Local = info
}
diff --git a/common/globals/pools.go b/common/globals/pools.go
index 0a09378..196458b 100644
--- a/common/globals/pools.go
+++ b/common/globals/pools.go
@@ -15,6 +15,10 @@ var CoordinatorMQPool coormq.Pool
var ScannerMQPool scmq.Pool
+// InitMQPool
+//
+// @Description: initializes the MQ connection pools
+// @param cfg
func InitMQPool(cfg *stgmq.Config) {
AgentMQPool = agtmq.NewPool(cfg)
@@ -25,6 +29,10 @@ func InitMQPool(cfg *stgmq.Config) {
var AgentRPCPool *agtrpc.Pool
+// InitAgentRPCPool
+//
+// @Description: initializes the agent RPC connection pool
+// @param cfg
func InitAgentRPCPool(cfg *agtrpc.PoolConfig) {
AgentRPCPool = agtrpc.NewPool(cfg)
}
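For reference, a self-contained sketch of the Acquire / defer Release idiom these pools are used with throughout the patch; the pool and client types here are simplified stand-ins, not the real MQ pool API:

	package main

	import (
		"errors"
		"fmt"
	)

	// Hypothetical client and pool types, for illustration only.
	type client struct{ id int }

	type pool struct{ free []*client }

	func (p *pool) Acquire() (*client, error) {
		if len(p.free) == 0 {
			return nil, errors.New("pool exhausted")
		}
		c := p.free[len(p.free)-1]
		p.free = p.free[:len(p.free)-1]
		return c, nil
	}

	func (p *pool) Release(c *client) { p.free = append(p.free, c) }

	// The idiom used throughout this patch: acquire, check the error,
	// and defer the release immediately so the client always goes back.
	func useOnce(p *pool) error {
		cli, err := p.Acquire()
		if err != nil {
			return fmt.Errorf("new client: %w", err)
		}
		defer p.Release(cli)
		fmt.Println("using client", cli.id)
		return nil
	}

	func main() {
		p := &pool{free: []*client{{id: 1}}}
		if err := useOnce(p); err != nil {
			fmt.Println(err)
		}
	}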
diff --git a/common/pkgs/cmd/download_package.go b/common/pkgs/cmd/download_package.go
index a8e5914..3adb2bd 100644
--- a/common/pkgs/cmd/download_package.go
+++ b/common/pkgs/cmd/download_package.go
@@ -14,16 +14,22 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// DownloadPackage describes a package download: the user ID, the package ID and the local output path.
type DownloadPackage struct {
userID cdssdk.UserID
packageID cdssdk.PackageID
outputPath string
}
+// DownloadPackageContext is the execution context of a download and carries the distributed lock service.
type DownloadPackageContext struct {
Distlock *distlock.Service
}
+// NewDownloadPackage creates a new download-package instance.
+// userID: user ID.
+// packageID: package ID.
+// outputPath: local output path.
func NewDownloadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID, outputPath string) *DownloadPackage {
return &DownloadPackage{
userID: userID,
@@ -32,53 +38,63 @@ func NewDownloadPackage(userID cdssdk.UserID, packageID cdssdk.PackageID, output
}
}
+// Execute runs the package download.
+// ctx: the download execution context.
+// Return value: any error encountered during execution.
func (t *DownloadPackage) Execute(ctx *DownloadPackageContext) error {
+ // 获取协调器MQ客户端
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return fmt.Errorf("new coordinator client: %w", err)
}
- defer stgglb.CoordinatorMQPool.Release(coorCli)
+ defer stgglb.CoordinatorMQPool.Release(coorCli) // 确保释放客户端资源
+ // 获取包内对象详情
getObjectDetails, err := coorCli.GetPackageObjectDetails(coormq.NewGetPackageObjectDetails(t.packageID))
if err != nil {
return fmt.Errorf("getting package object details: %w", err)
}
+ // 创建下载对象迭代器
objIter := iterator.NewDownloadObjectIterator(getObjectDetails.Objects, &iterator.DownloadContext{
Distlock: ctx.Distlock,
})
- defer objIter.Close()
+ defer objIter.Close() // 确保迭代器关闭
+ // 写入对象数据到本地
return t.writeObjects(objIter)
}
+// writeObjects writes the downloaded objects to the local file system.
+// objIter: iterator over the objects being downloaded.
+// Return value: any error encountered while writing.
func (t *DownloadPackage) writeObjects(objIter iterator.DownloadingObjectIterator) error {
for {
objInfo, err := objIter.MoveNext()
if err == iterator.ErrNoMoreItem {
- break
+ break // 没有更多对象时结束循环
}
if err != nil {
return err
}
err = func() error {
- defer objInfo.File.Close()
+ defer objInfo.File.Close() // 确保文件资源被释放
- fullPath := filepath.Join(t.outputPath, objInfo.Object.Path)
+ fullPath := filepath.Join(t.outputPath, objInfo.Object.Path) // 计算文件完整路径
- dirPath := filepath.Dir(fullPath)
- if err := os.MkdirAll(dirPath, 0755); err != nil {
+ dirPath := filepath.Dir(fullPath) // 获取文件所在目录路径
+ if err := os.MkdirAll(dirPath, 0755); err != nil { // 创建目录,如果不存在
return fmt.Errorf("creating object dir: %w", err)
}
- outputFile, err := os.Create(fullPath)
+ outputFile, err := os.Create(fullPath) // 创建本地文件
if err != nil {
return fmt.Errorf("creating object file: %w", err)
}
- defer outputFile.Close()
+ defer outputFile.Close() // 确保文件关闭
- _, err = io.Copy(outputFile, objInfo.File)
+ _, err = io.Copy(outputFile, objInfo.File) // 将对象数据写入本地文件
if err != nil {
return fmt.Errorf("copy object data to local file failed, err: %w", err)
}
@@ -86,9 +102,9 @@ func (t *DownloadPackage) writeObjects(objIter iterator.DownloadingObjectIterato
return nil
}()
if err != nil {
- return err
+ return err // 如果写入过程中出现错误,返回该错误
}
}
- return nil
+ return nil // 没有错误,返回nil
}
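A self-contained sketch of the per-iteration closure used in writeObjects, which ensures the deferred Close runs once per object rather than accumulating until the loop ends; the map input is a hypothetical stand-in for the download iterator:

	package main

	import (
		"fmt"
		"io"
		"os"
		"path/filepath"
		"strings"
	)

	// writeAll writes each reader to outputDir/path, creating directories as needed.
	func writeAll(outputDir string, objects map[string]io.Reader) error {
		for path, r := range objects {
			err := func() error {
				fullPath := filepath.Join(outputDir, path)
				if err := os.MkdirAll(filepath.Dir(fullPath), 0755); err != nil {
					return fmt.Errorf("creating object dir: %w", err)
				}
				f, err := os.Create(fullPath)
				if err != nil {
					return fmt.Errorf("creating object file: %w", err)
				}
				defer f.Close() // runs at the end of this iteration, not of writeAll
				if _, err := io.Copy(f, r); err != nil {
					return fmt.Errorf("copy object data: %w", err)
				}
				return nil
			}()
			if err != nil {
				return err
			}
		}
		return nil
	}

	func main() {
		objs := map[string]io.Reader{"a/hello.txt": strings.NewReader("hello")}
		if err := writeAll(os.TempDir(), objs); err != nil {
			fmt.Println(err)
		}
	}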
diff --git a/common/pkgs/cmd/upload_objects.go b/common/pkgs/cmd/upload_objects.go
index 59a66bc..058b8d4 100644
--- a/common/pkgs/cmd/upload_objects.go
+++ b/common/pkgs/cmd/upload_objects.go
@@ -22,6 +22,7 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// UploadObjects describes an upload: the user ID, the package ID, the object iterator and the node affinity.
type UploadObjects struct {
userID cdssdk.UserID
packageID cdssdk.PackageID
@@ -29,10 +30,12 @@ type UploadObjects struct {
nodeAffinity *cdssdk.NodeID
}
+// UploadObjectsResult holds the list of per-object upload results.
type UploadObjectsResult struct {
Objects []ObjectUploadResult
}
+// ObjectUploadResult is the upload result of a single object: its upload info, error and object ID.
type ObjectUploadResult struct {
Info *iterator.IterUploadingObject
Error error
@@ -40,17 +43,20 @@ type ObjectUploadResult struct {
ObjectID cdssdk.ObjectID
}
+// UploadNodeInfo describes a candidate upload node: the node, its delay and whether it is in the same location as the client.
type UploadNodeInfo struct {
Node cdssdk.Node
Delay time.Duration
IsSameLocation bool
}
+// UploadObjectsContext is the upload execution context: the distributed lock service and the connectivity collector.
type UploadObjectsContext struct {
Distlock *distlock.Service
Connectivity *connectivity.Collector
}
+// NewUploadObjects creates a new UploadObjects instance.
func NewUploadObjects(userID cdssdk.UserID, packageID cdssdk.PackageID, objIter iterator.UploadingObjectIterator, nodeAffinity *cdssdk.NodeID) *UploadObjects {
return &UploadObjects{
userID: userID,
@@ -60,19 +66,23 @@ func NewUploadObjects(userID cdssdk.UserID, packageID cdssdk.PackageID, objIter
}
}
+// Execute performs the upload.
func (t *UploadObjects) Execute(ctx *UploadObjectsContext) (*UploadObjectsResult, error) {
defer t.objectIter.Close()
+ // 获取协调器客户端
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
+ // 获取用户节点信息
getUserNodesResp, err := coorCli.GetUserNodes(coormq.NewGetUserNodes(t.userID))
if err != nil {
return nil, fmt.Errorf("getting user nodes: %w", err)
}
+ // 获取节点连通性信息
cons := ctx.Connectivity.GetAll()
userNodes := lo.Map(getUserNodesResp.Nodes, func(node cdssdk.Node, index int) UploadNodeInfo {
delay := time.Duration(math.MaxInt64)
@@ -92,9 +102,8 @@ func (t *UploadObjects) Execute(ctx *UploadObjectsContext) (*UploadObjectsResult
return nil, fmt.Errorf("user no available nodes")
}
- // 给上传节点的IPFS加锁
+ // 对上传节点的IPFS加锁
ipfsReqBlder := reqbuilder.NewBuilder()
- // 如果本地的IPFS也是存储系统的一个节点,那么从本地上传时,需要加锁
if stgglb.Local.NodeID != nil {
ipfsReqBlder.IPFS().Buzy(*stgglb.Local.NodeID)
}
@@ -105,14 +114,15 @@ func (t *UploadObjects) Execute(ctx *UploadObjectsContext) (*UploadObjectsResult
ipfsReqBlder.IPFS().Buzy(node.Node.NodeID)
}
- // TODO 考虑加Object的Create锁
- // 防止上传的副本被清除
+
+ // 获得IPFS锁
ipfsMutex, err := ipfsReqBlder.MutexLock(ctx.Distlock)
if err != nil {
return nil, fmt.Errorf("acquire locks failed, err: %w", err)
}
defer ipfsMutex.Unlock()
+ // 上传并更新包信息
rets, err := uploadAndUpdatePackage(t.packageID, t.objectIter, userNodes, t.nodeAffinity)
if err != nil {
return nil, err
@@ -123,10 +133,8 @@ func (t *UploadObjects) Execute(ctx *UploadObjectsContext) (*UploadObjectsResult
}, nil
}
-// chooseUploadNode 选择一个上传文件的节点
-// 1. 选择设置了亲和性的节点
-// 2. 从与当前客户端相同地域的节点中随机选一个
-// 3. 没有的话从所有节点选择延迟最低的节点
+// chooseUploadNode picks the node used to upload the files.
+// It prefers the affinity node if one is set; otherwise it picks a random node in the same location as the client; failing that, it picks the node with the lowest delay.
func chooseUploadNode(nodes []UploadNodeInfo, nodeAffinity *cdssdk.NodeID) UploadNodeInfo {
if nodeAffinity != nil {
aff, ok := lo.Find(nodes, func(node UploadNodeInfo) bool { return node.Node.NodeID == *nodeAffinity })
@@ -146,49 +154,68 @@ func chooseUploadNode(nodes []UploadNodeInfo, nodeAffinity *cdssdk.NodeID) Uploa
return nodes[0]
}
+// uploadAndUpdatePackage uploads the files and updates the package metadata.
+// packageID: ID of the package to update.
+// objectIter: iterator over the objects to upload.
+// userNodes: candidate upload nodes available to the user.
+// nodeAffinity: the user's preferred upload node, if any.
+// Return values: the per-object upload results and any error encountered.
func uploadAndUpdatePackage(packageID cdssdk.PackageID, objectIter iterator.UploadingObjectIterator, userNodes []UploadNodeInfo, nodeAffinity *cdssdk.NodeID) ([]ObjectUploadResult, error) {
+ // 获取协调器客户端
coorCli, err := stgglb.CoordinatorMQPool.Acquire()
if err != nil {
return nil, fmt.Errorf("new coordinator client: %w", err)
}
defer stgglb.CoordinatorMQPool.Release(coorCli)
- // 为所有文件选择相同的上传节点
+ // 选择上传节点
uploadNode := chooseUploadNode(userNodes, nodeAffinity)
var uploadRets []ObjectUploadResult
- //上传文件夹
+ // 构建添加对象的列表
var adds []coormq.AddObjectEntry
for {
+ // 获取下一个对象信息。如果不存在更多对象,则退出循环。
objInfo, err := objectIter.MoveNext()
if err == iterator.ErrNoMoreItem {
break
}
if err != nil {
+ // 对象获取发生错误,返回错误信息。
return nil, fmt.Errorf("reading object: %w", err)
}
+
+ // 执行上传逻辑,每个对象依次执行。
err = func() error {
+ // 确保对象文件在函数退出时关闭。
defer objInfo.File.Close()
+ // 记录上传开始时间。
uploadTime := time.Now()
+ // 上传文件,并获取文件哈希值。
fileHash, err := uploadFile(objInfo.File, uploadNode)
if err != nil {
+ // 文件上传失败,记录错误信息并返回。
return fmt.Errorf("uploading file: %w", err)
}
+ // 收集上传结果。
uploadRets = append(uploadRets, ObjectUploadResult{
Info: objInfo,
Error: err,
})
+ // 准备添加到队列的条目,以供后续处理。
adds = append(adds, coormq.NewAddObjectEntry(objInfo.Path, objInfo.Size, fileHash, uploadTime, uploadNode.Node.NodeID))
return nil
}()
if err != nil {
+ // 上传操作中出现错误,返回错误信息。
return nil, err
}
}
+ // 更新包信息
_, err = coorCli.UpdatePackage(coormq.NewUpdatePackage(packageID, adds, nil))
if err != nil {
return nil, fmt.Errorf("updating package: %w", err)
@@ -197,29 +224,29 @@ func uploadAndUpdatePackage(packageID cdssdk.PackageID, objectIter iterator.Uplo
return uploadRets, nil
}
+// uploadFile uploads a single file.
+// file: the file stream to upload.
+// uploadNode: the node chosen for the upload.
+// Return values: the file hash and any error encountered.
func uploadFile(file io.Reader, uploadNode UploadNodeInfo) (string, error) {
- // 本地有IPFS,则直接从本地IPFS上传
+ // 尝试使用本地IPFS上传
if stgglb.IPFSPool != nil {
logger.Infof("try to use local IPFS to upload file")
- // 只有本地IPFS不是存储系统中的一个节点,才需要Pin文件
fileHash, err := uploadToLocalIPFS(file, uploadNode.Node.NodeID, stgglb.Local.NodeID == nil)
if err == nil {
return fileHash, nil
-
} else {
logger.Warnf("upload to local IPFS failed, so try to upload to node %d, err: %s", uploadNode.Node.NodeID, err.Error())
}
}
- // 否则发送到agent上传
- // 如果客户端与节点在同一个地域,则使用内网地址连接节点
+ // 否则,发送到agent进行上传
nodeIP := uploadNode.Node.ExternalIP
grpcPort := uploadNode.Node.ExternalGRPCPort
if uploadNode.IsSameLocation {
nodeIP = uploadNode.Node.LocalIP
grpcPort = uploadNode.Node.LocalGRPCPort
-
logger.Infof("client and node %d are at the same location, use local ip", uploadNode.Node.NodeID)
}
@@ -231,6 +258,11 @@ func uploadFile(file io.Reader, uploadNode UploadNodeInfo) (string, error) {
return fileHash, nil
}
+// uploadToNode sends a file to the specified node.
+// file: the file stream.
+// nodeIP: IP address of the node.
+// grpcPort: gRPC port of the node.
+// Return values: the file hash and any error encountered.
func uploadToNode(file io.Reader, nodeIP string, grpcPort int) (string, error) {
rpcCli, err := stgglb.AgentRPCPool.Acquire(nodeIP, grpcPort)
if err != nil {
@@ -241,23 +273,31 @@ func uploadToNode(file io.Reader, nodeIP string, grpcPort int) (string, error) {
return rpcCli.SendIPFSFile(file)
}
+// uploadToLocalIPFS uploads a file through the local IPFS client and, if requested, pins it.
+// file: the file to upload, provided as an io.Reader.
+// nodeID: ID of the node on which the file should be pinned.
+// shouldPin: whether to pin the uploaded file; if true the file is pinned, otherwise it is not.
+// Returns the IPFS hash of the uploaded file and any error encountered.
func uploadToLocalIPFS(file io.Reader, nodeID cdssdk.NodeID, shouldPin bool) (string, error) {
+ // 从IPFS池获取一个IPFS客户端实例
ipfsCli, err := stgglb.IPFSPool.Acquire()
if err != nil {
return "", fmt.Errorf("new ipfs client: %w", err)
}
- defer ipfsCli.Close()
+ defer ipfsCli.Close() // 确保IPFS客户端在函数返回前被释放
- // 从本地IPFS上传文件
+ // 在IPFS上创建文件并获取其哈希值
fileHash, err := ipfsCli.CreateFile(file)
if err != nil {
return "", fmt.Errorf("creating ipfs file: %w", err)
}
+ // 如果不需要固定文件,则直接返回文件哈希值
if !shouldPin {
return fileHash, nil
}
+ // 将文件固定在IPFS节点上
err = pinIPFSFile(nodeID, fileHash)
if err != nil {
return "", err
@@ -266,6 +306,10 @@ func uploadToLocalIPFS(file io.Reader, nodeID cdssdk.NodeID, shouldPin bool) (st
return fileHash, nil
}
+// pinIPFSFile pins a file on the given IPFS node.
+// nodeID: node ID.
+// fileHash: file hash.
+// Return value: any error encountered.
func pinIPFSFile(nodeID cdssdk.NodeID, fileHash string) error {
agtCli, err := stgglb.AgentMQPool.Acquire(nodeID)
if err != nil {
@@ -273,7 +317,6 @@ func pinIPFSFile(nodeID cdssdk.NodeID, fileHash string) error {
}
defer stgglb.AgentMQPool.Release(agtCli)
- // 然后让最近节点pin本地上传的文件
_, err = agtCli.PinObject(agtmq.ReqPinObject([]string{fileHash}, false))
if err != nil {
return fmt.Errorf("start pinning object: %w", err)
diff --git a/common/pkgs/db/object.go b/common/pkgs/db/object.go
index 3655c98..b240791 100644
--- a/common/pkgs/db/object.go
+++ b/common/pkgs/db/object.go
@@ -84,38 +84,46 @@ func (*ObjectDB) GetPackageObjects(ctx SQLContext, packageID cdssdk.PackageID) (
return lo.Map(ret, func(o model.TempObject, idx int) model.Object { return o.ToObject() }), err
}
+// GetPackageObjectDetails returns the object details of the given package.
+//
+// ctx: SQL execution context.
+// packageID: the package ID.
+//
+// It returns the list of object details and any error encountered.
func (db *ObjectDB) GetPackageObjectDetails(ctx SQLContext, packageID cdssdk.PackageID) ([]stgmod.ObjectDetail, error) {
+ // 从Object表中查询所有属于指定包ID的对象,按ObjectID升序排序
var objs []model.TempObject
err := sqlx.Select(ctx, &objs, "select * from Object where PackageID = ? order by ObjectID asc", packageID)
if err != nil {
return nil, fmt.Errorf("getting objects: %w", err)
}
+ // 初始化返回的Object详情列表
rets := make([]stgmod.ObjectDetail, 0, len(objs))
+ // 从ObjectBlock表中查询所有属于指定包ID的对象块,按ObjectID和Index升序排序
var allBlocks []stgmod.ObjectBlock
err = sqlx.Select(ctx, &allBlocks, "select ObjectBlock.* from ObjectBlock, Object where PackageID = ? and ObjectBlock.ObjectID = Object.ObjectID order by ObjectBlock.ObjectID, `Index` asc", packageID)
if err != nil {
return nil, fmt.Errorf("getting all object blocks: %w", err)
}
+ // 从PinnedObject表中查询所有属于指定包ID的被固定的对象,按ObjectID排序
var allPinnedObjs []cdssdk.PinnedObject
err = sqlx.Select(ctx, &allPinnedObjs, "select PinnedObject.* from PinnedObject, Object where PackageID = ? and PinnedObject.ObjectID = Object.ObjectID order by PinnedObject.ObjectID", packageID)
if err != nil {
return nil, fmt.Errorf("getting all pinned objects: %w", err)
}
- blksCur := 0
- pinnedsCur := 0
+	// Walk the objects and build the detailed info for each one
+	blksCur := 0    // cursor into the object-block result set
+	pinnedsCur := 0 // cursor into the pinned-object result set
for _, temp := range objs {
detail := stgmod.ObjectDetail{
Object: temp.ToObject(),
}
- // 1. 查询Object和ObjectBlock时均按照ObjectID升序排序
- // 2. ObjectBlock结果集中的不同ObjectID数只会比Object结果集的少
- // 因此在两个结果集上同时从头开始遍历时,如果两边的ObjectID字段不同,那么一定是ObjectBlock这边的ObjectID > Object的ObjectID,
- // 此时让Object的遍历游标前进,直到两边的ObjectID再次相等
+ // 同时遍历对象和对象块的结果集,将属于同一对象的对象块附加到Object详情中
for ; blksCur < len(allBlocks); blksCur++ {
if allBlocks[blksCur].ObjectID != temp.ObjectID {
break
@@ -123,6 +131,7 @@ func (db *ObjectDB) GetPackageObjectDetails(ctx SQLContext, packageID cdssdk.Pac
detail.Blocks = append(detail.Blocks, allBlocks[blksCur])
}
+ // 遍历被固定对象的结果集,将被固定的信息附加到Object详情中
for ; pinnedsCur < len(allPinnedObjs); pinnedsCur++ {
if allPinnedObjs[pinnedsCur].ObjectID != temp.ObjectID {
break
@@ -130,6 +139,7 @@ func (db *ObjectDB) GetPackageObjectDetails(ctx SQLContext, packageID cdssdk.Pac
detail.PinnedAt = append(detail.PinnedAt, allPinnedObjs[pinnedsCur].NodeID)
}
+ // 将构建好的Object详情添加到返回列表中
rets = append(rets, detail)
}
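A self-contained sketch of the shared-cursor merge used in GetPackageObjectDetails: both slices are assumed sorted by ObjectID and every block's ObjectID appears in the object list, so a single cursor over the blocks suffices; the types are simplified stand-ins:

	package main

	import "fmt"

	// Hypothetical row types, for illustration only.
	type object struct{ ID int }

	type block struct {
		ObjectID int
		Index    int
	}

	// attachBlocks walks both sorted slices once, attaching each object's blocks.
	func attachBlocks(objs []object, blocks []block) map[int][]block {
		out := make(map[int][]block)
		cur := 0
		for _, o := range objs {
			for ; cur < len(blocks); cur++ {
				if blocks[cur].ObjectID != o.ID {
					break // blocks for this object are exhausted; move to the next object
				}
				out[o.ID] = append(out[o.ID], blocks[cur])
			}
		}
		return out
	}

	func main() {
		objs := []object{{ID: 1}, {ID: 2}}
		blocks := []block{{ObjectID: 1, Index: 0}, {ObjectID: 1, Index: 1}, {ObjectID: 2, Index: 0}}
		fmt.Println(attachBlocks(objs, blocks))
	}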
diff --git a/common/pkgs/mq/coordinator/package.go b/common/pkgs/mq/coordinator/package.go
index 3d01e5d..c2917a0 100644
--- a/common/pkgs/mq/coordinator/package.go
+++ b/common/pkgs/mq/coordinator/package.go
@@ -47,6 +47,7 @@ func NewGetPackageResp(pkg model.Package) *GetPackageResp {
Package: pkg,
}
}
+
func (client *Client) GetPackage(msg *GetPackage) (*GetPackageResp, error) {
return mq.Request(Service.GetPackage, client.rabbitCli, msg)
}
@@ -77,6 +78,7 @@ func NewCreatePackageResp(packageID cdssdk.PackageID) *CreatePackageResp {
PackageID: packageID,
}
}
+
func (client *Client) CreatePackage(msg *CreatePackage) (*CreatePackageResp, error) {
return mq.Request(Service.CreatePackage, client.rabbitCli, msg)
}
diff --git a/coordinator/internal/mq/package.go b/coordinator/internal/mq/package.go
index ddab4ee..20f80fd 100644
--- a/coordinator/internal/mq/package.go
+++ b/coordinator/internal/mq/package.go
@@ -14,28 +14,46 @@ import (
coormq "gitlink.org.cn/cloudream/storage/common/pkgs/mq/coordinator"
)
+// GetPackage gets the package information by PackageID.
+// Parameters:
+// - msg: request message carrying the PackageID to look up
+// Return values:
+// - *coormq.GetPackageResp: the successful response with the package information
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) GetPackage(msg *coormq.GetPackage) (*coormq.GetPackageResp, *mq.CodeMessage) {
+ // 通过ID从数据库获取包信息
pkg, err := svc.db.Package().GetByID(svc.db.SQLCtx(), msg.PackageID)
if err != nil {
+ // 记录日志并返回错误信息
logger.WithField("PackageID", msg.PackageID).
Warnf("get package: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get package failed")
}
+ // 返回成功响应
return mq.ReplyOK(coormq.NewGetPackageResp(pkg))
}
+// CreatePackage creates a new package.
+// Parameters:
+// - msg: request message carrying the information needed to create the package
+// Return values:
+// - *coormq.CreatePackageResp: the successful response
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) CreatePackage(msg *coormq.CreatePackage) (*coormq.CreatePackageResp, *mq.CodeMessage) {
var pkgID cdssdk.PackageID
+ // 在事务中执行创建包的操作
err := svc.db.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
var err error
+ // 检查桶是否可用
isAvai, _ := svc.db.Bucket().IsAvailable(tx, msg.BucketID, msg.UserID)
if !isAvai {
return fmt.Errorf("bucket is not avaiable to the user")
}
+ // 创建包
pkgID, err = svc.db.Package().Create(tx, msg.BucketID, msg.Name)
if err != nil {
return fmt.Errorf("creating package: %w", err)
@@ -44,30 +62,40 @@ func (svc *Service) CreatePackage(msg *coormq.CreatePackage) (*coormq.CreatePack
return nil
})
if err != nil {
+ // 记录日志并返回错误信息
logger.WithField("BucketID", msg.BucketID).
WithField("Name", msg.Name).
Warn(err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "creating package failed")
}
+ // 返回成功响应
return mq.ReplyOK(coormq.NewCreatePackageResp(pkgID))
}
+// UpdatePackage updates the contents of a package.
+// Parameters:
+// - msg: request message carrying the information needed to update the package
+// Return values:
+// - *coormq.UpdatePackageResp: the successful response
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) UpdatePackage(msg *coormq.UpdatePackage) (*coormq.UpdatePackageResp, *mq.CodeMessage) {
+ // 在事务中执行更新包的操作
err := svc.db.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
+ // 验证包是否存在
_, err := svc.db.Package().GetByID(tx, msg.PackageID)
if err != nil {
return fmt.Errorf("getting package by id: %w", err)
}
- // 先执行删除操作
+ // 删除对象
if len(msg.Deletes) > 0 {
if err := svc.db.Object().BatchDelete(tx, msg.Deletes); err != nil {
return fmt.Errorf("deleting objects: %w", err)
}
}
- // 再执行添加操作
+ // 添加对象
if len(msg.Adds) > 0 {
if _, err := svc.db.Object().BatchAdd(tx, msg.PackageID, msg.Adds); err != nil {
return fmt.Errorf("adding objects: %w", err)
@@ -77,25 +105,37 @@ func (svc *Service) UpdatePackage(msg *coormq.UpdatePackage) (*coormq.UpdatePack
return nil
})
if err != nil {
+ // 记录日志并返回错误信息
logger.WithField("PackageID", msg.PackageID).Warn(err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "update package failed")
}
+ // 返回成功响应
return mq.ReplyOK(coormq.NewUpdatePackageResp())
}
+// DeletePackage deletes a package.
+// Parameters:
+// - msg: request message carrying the information needed to delete the package
+// Return values:
+// - *coormq.DeletePackageResp: the successful response
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) DeletePackage(msg *coormq.DeletePackage) (*coormq.DeletePackageResp, *mq.CodeMessage) {
+ // 在事务中执行删除包的操作
err := svc.db.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
+ // 验证包是否可用
isAvai, _ := svc.db.Package().IsAvailable(tx, msg.UserID, msg.PackageID)
if !isAvai {
return fmt.Errorf("package is not available to the user")
}
+ // 软删除包
err := svc.db.Package().SoftDelete(tx, msg.PackageID)
if err != nil {
return fmt.Errorf("soft delete package: %w", err)
}
+ // 删除未使用的包
err = svc.db.Package().DeleteUnused(tx, msg.PackageID)
if err != nil {
logger.WithField("UserID", msg.UserID).
@@ -106,58 +146,72 @@ func (svc *Service) DeletePackage(msg *coormq.DeletePackage) (*coormq.DeletePack
return nil
})
if err != nil {
+ // 记录日志并返回错误信息
logger.WithField("UserID", msg.UserID).
WithField("PackageID", msg.PackageID).
Warnf(err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "delete package failed")
}
+ // 返回成功响应
return mq.ReplyOK(coormq.NewDeletePackageResp())
}
+// GetPackageCachedNodes returns information about the nodes that cache data of the given package.
+// Parameters:
+// - msg: request carrying the packageID and the user ID
+// Return values:
+// - *coormq.GetPackageCachedNodesResp: the list of nodes caching the package data
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) GetPackageCachedNodes(msg *coormq.GetPackageCachedNodes) (*coormq.GetPackageCachedNodesResp, *mq.CodeMessage) {
+ // 检查package是否可用
isAva, err := svc.db.Package().IsAvailable(svc.db.SQLCtx(), msg.UserID, msg.PackageID)
if err != nil {
+ // 记录检查package可用性失败的日志
logger.WithField("UserID", msg.UserID).
WithField("PackageID", msg.PackageID).
Warnf("check package available failed, err: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "check package available failed")
}
if !isAva {
+ // 记录package不可用的日志
logger.WithField("UserID", msg.UserID).
WithField("PackageID", msg.PackageID).
Warnf("package is not available to the user")
return nil, mq.Failed(errorcode.OperationFailed, "package is not available to the user")
}
- // 这个函数只是统计哪些节点缓存了Package中的数据,不需要多么精确,所以可以不用事务
+ // 获取package中的对象详情,用于后续统计节点缓存信息
objDetails, err := svc.db.Object().GetPackageObjectDetails(svc.db.SQLCtx(), msg.PackageID)
if err != nil {
+ // 记录获取package对象详情失败的日志
logger.WithField("PackageID", msg.PackageID).
Warnf("get package block details: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get package block details failed")
}
+ // 统计各节点缓存的文件信息
var packageSize int64
nodeInfoMap := make(map[cdssdk.NodeID]*cdssdk.NodePackageCachingInfo)
for _, obj := range objDetails {
- // 只要存了文件的一个块,就认为此节点存了整个文件
for _, block := range obj.Blocks {
+ // 更新或创建节点缓存信息
info, ok := nodeInfoMap[block.NodeID]
if !ok {
info = &cdssdk.NodePackageCachingInfo{
NodeID: block.NodeID,
}
nodeInfoMap[block.NodeID] = info
-
}
+ // 更新节点的文件大小和对象计数
info.FileSize += obj.Object.Size
info.ObjectCount++
}
}
+ // 整理节点缓存信息,并按节点ID排序
var nodeInfos []cdssdk.NodePackageCachingInfo
for _, nodeInfo := range nodeInfoMap {
nodeInfos = append(nodeInfos, *nodeInfo)
@@ -166,17 +220,27 @@ func (svc *Service) GetPackageCachedNodes(msg *coormq.GetPackageCachedNodes) (*c
sort.Slice(nodeInfos, func(i, j int) bool {
return nodeInfos[i].NodeID < nodeInfos[j].NodeID
})
+ // 返回成功响应,包含节点缓存信息
return mq.ReplyOK(coormq.NewGetPackageCachedNodesResp(nodeInfos, packageSize))
}
+// GetPackageLoadedNodes returns the IDs of the nodes that have loaded the given package.
+// Parameters:
+// - msg: request carrying the packageID
+// Return values:
+// - *coormq.GetPackageLoadedNodesResp: the list of node IDs that have loaded the package
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) GetPackageLoadedNodes(msg *coormq.GetPackageLoadedNodes) (*coormq.GetPackageLoadedNodesResp, *mq.CodeMessage) {
+ // 根据packageID查找相关的存储信息
storages, err := svc.db.StoragePackage().FindPackageStorages(svc.db.SQLCtx(), msg.PackageID)
if err != nil {
+ // 记录查找存储信息失败的日志
logger.WithField("PackageID", msg.PackageID).
Warnf("get storages by packageID failed, err: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get storages by packageID failed")
}
+ // 去重,获取唯一节点ID列表
uniqueNodeIDs := make(map[cdssdk.NodeID]bool)
var nodeIDs []cdssdk.NodeID
for _, stg := range storages {
@@ -186,18 +250,28 @@ func (svc *Service) GetPackageLoadedNodes(msg *coormq.GetPackageLoadedNodes) (*c
}
}
+ // 返回成功响应,包含节点ID列表
return mq.ReplyOK(coormq.NewGetPackageLoadedNodesResp(nodeIDs))
}
+// GetPackageLoadLogDetails returns the load-log details of the given package.
+// Parameters:
+// - msg: request carrying the packageID
+// Return values:
+// - *coormq.GetPackageLoadLogDetailsResp: the detailed load-log entries of the package
+// - *mq.CodeMessage: the error message on failure
func (svc *Service) GetPackageLoadLogDetails(msg *coormq.GetPackageLoadLogDetails) (*coormq.GetPackageLoadLogDetailsResp, *mq.CodeMessage) {
var logs []coormq.PackageLoadLogDetail
+ // 根据packageID获取加载日志
rawLogs, err := svc.db.StoragePackageLog().GetByPackageID(svc.db.SQLCtx(), msg.PackageID)
if err != nil {
+ // 记录获取加载日志失败的日志
logger.WithField("PackageID", msg.PackageID).
Warnf("getting storage package log: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get storage package log failed")
}
+ // 通过存储ID获取存储信息,用于填充日志详情
stgs := make(map[cdssdk.StorageID]model.Storage)
for _, raw := range rawLogs {
@@ -205,6 +279,7 @@ func (svc *Service) GetPackageLoadLogDetails(msg *coormq.GetPackageLoadLogDetail
if !ok {
stg, err = svc.db.Storage().GetByID(svc.db.SQLCtx(), raw.StorageID)
if err != nil {
+ // 记录获取存储信息失败的日志
logger.WithField("PackageID", msg.PackageID).
Warnf("getting storage: %s", err.Error())
return nil, mq.Failed(errorcode.OperationFailed, "get storage failed")
@@ -213,6 +288,7 @@ func (svc *Service) GetPackageLoadLogDetails(msg *coormq.GetPackageLoadLogDetail
stgs[raw.StorageID] = stg
}
+ // 填充日志详情
logs = append(logs, coormq.PackageLoadLogDetail{
Storage: stg,
UserID: raw.UserID,
@@ -220,5 +296,6 @@ func (svc *Service) GetPackageLoadLogDetails(msg *coormq.GetPackageLoadLogDetail
})
}
+ // 返回成功响应,包含package加载日志详情
return mq.ReplyOK(coormq.RespGetPackageLoadLogDetails(logs))
}
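A self-contained sketch of the aggregate-then-sort shape used in GetPackageCachedNodes, with simplified stand-in types for the per-node caching statistics:

	package main

	import (
		"fmt"
		"sort"
	)

	// Hypothetical field subsets, for illustration only.
	type cachingInfo struct {
		NodeID      int
		FileSize    int64
		ObjectCount int
	}

	type blockRef struct {
		NodeID     int
		ObjectSize int64
	}

	// aggregate folds block references into one entry per node, then sorts by NodeID,
	// mirroring the map-then-sort structure of the handler above.
	func aggregate(blocks []blockRef) []cachingInfo {
		byNode := make(map[int]*cachingInfo)
		for _, b := range blocks {
			info, ok := byNode[b.NodeID]
			if !ok {
				info = &cachingInfo{NodeID: b.NodeID}
				byNode[b.NodeID] = info
			}
			info.FileSize += b.ObjectSize
			info.ObjectCount++
		}
		var out []cachingInfo
		for _, info := range byNode {
			out = append(out, *info)
		}
		sort.Slice(out, func(i, j int) bool { return out[i].NodeID < out[j].NodeID })
		return out
	}

	func main() {
		blocks := []blockRef{{NodeID: 2, ObjectSize: 10}, {NodeID: 1, ObjectSize: 5}, {NodeID: 2, ObjectSize: 7}}
		fmt.Println(aggregate(blocks))
	}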
diff --git a/coordinator/main.go b/coordinator/main.go
index 182e847..fe35eec 100644
--- a/coordinator/main.go
+++ b/coordinator/main.go
@@ -1,5 +1,6 @@
package main
+// The main package initializes and starts the coordinator server.
import (
"fmt"
"os"
@@ -12,48 +13,58 @@ import (
"gitlink.org.cn/cloudream/storage/coordinator/internal/mq"
)
+// main initializes and starts the program.
func main() {
+ // 初始化配置
err := config.Init()
if err != nil {
fmt.Printf("init config failed, err: %s", err.Error())
os.Exit(1)
}
+ // 初始化日志系统
err = logger.Init(&config.Cfg().Logger)
if err != nil {
fmt.Printf("init logger failed, err: %s", err.Error())
os.Exit(1)
}
+ // 初始化数据库连接
db, err := mydb.NewDB(&config.Cfg().DB)
if err != nil {
logger.Fatalf("new db failed, err: %s", err.Error())
}
+ // 初始化扫描器客户端
scanner, err := scmq.NewClient(&config.Cfg().RabbitMQ)
if err != nil {
logger.Fatalf("new scanner client failed, err: %s", err.Error())
}
+ // 初始化协调器服务器
coorSvr, err := coormq.NewServer(mq.NewService(db, scanner), &config.Cfg().RabbitMQ)
if err != nil {
logger.Fatalf("new coordinator server failed, err: %s", err.Error())
}
+ // 设置协调器服务器错误处理
coorSvr.OnError(func(err error) {
logger.Warnf("coordinator server err: %s", err.Error())
})
- // 启动服务
+	// Start the coordinator server asynchronously
go serveCoorServer(coorSvr)
+ // 永久等待,保持程序运行
forever := make(chan bool)
<-forever
}
+// serveCoorServer starts and runs the coordinator server.
func serveCoorServer(server *coormq.Server) {
logger.Info("start serving command server")
+	// Serve and log any error when the server stops
err := server.Serve()
if err != nil {
logger.Errorf("command server stopped with error: %s", err.Error())
diff --git a/go.mod b/go.mod
index 256334b..aaeaa04 100644
--- a/go.mod
+++ b/go.mod
@@ -6,13 +6,12 @@ replace gitlink.org.cn/cloudream/common v0.0.0 => ../common
require (
github.com/baohan10/reedsolomon v0.0.0-20230406042632-43574cac9fa7
- github.com/beevik/etree v1.2.0
github.com/gin-gonic/gin v1.9.1
- github.com/go-ping/ping v1.1.0
github.com/go-sql-driver/mysql v1.7.1
- github.com/ipfs/go-ipfs-api v0.7.0
+ github.com/google/uuid v1.3.0
github.com/jedib0t/go-pretty/v6 v6.4.7
github.com/jmoiron/sqlx v1.3.5
+ github.com/klauspost/reedsolomon v1.11.8
github.com/magefile/mage v1.15.0
github.com/samber/lo v1.38.1
github.com/smartystreets/goconvey v1.8.1
@@ -31,31 +30,38 @@ require (
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
+ github.com/fatih/color v1.16.0 // indirect
+ github.com/fogleman/gg v1.3.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.14.0 // indirect
+ github.com/goccy/go-graphviz v0.1.2 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
+ github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/go-cmp v0.5.9 // indirect
- github.com/google/uuid v1.3.0 // indirect
+ github.com/google/licensecheck v0.3.1 // indirect
+ github.com/google/safehtml v0.0.3-0.20211026203422-d6f0e11a5516 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/imdario/mergo v0.3.15 // indirect
github.com/ipfs/boxo v0.12.0 // indirect
github.com/ipfs/go-cid v0.4.1 // indirect
+ github.com/ipfs/go-ipfs-api v0.7.0 // indirect
+ github.com/jessevdk/go-flags v1.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/klauspost/cpuid/v2 v2.2.4 // indirect
- github.com/klauspost/reedsolomon v1.11.8 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/libp2p/go-buffer-pool v0.1.0 // indirect
github.com/libp2p/go-flow-metrics v0.1.0 // indirect
github.com/libp2p/go-libp2p v0.26.3 // indirect
- github.com/mattn/go-isatty v0.0.19 // indirect
+ github.com/mattn/go-colorable v0.1.13 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
@@ -71,15 +77,20 @@ require (
github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-multistream v0.4.1 // indirect
github.com/multiformats/go-varint v0.0.7 // indirect
- github.com/otiai10/copy v1.12.0 // indirect
+ github.com/ofabry/go-callvis v0.7.0 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
+ github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
+ github.com/pkg/errors v0.9.1 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
+ github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481 // indirect
github.com/sirupsen/logrus v1.9.2 // indirect
github.com/smarty/assertions v1.15.0 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/streadway/amqp v1.1.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
+ github.com/uber/go-torch v0.0.0-20181107071353-86f327cc820e // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
+ github.com/yuin/goldmark v1.7.1 // indirect
github.com/zyedidia/generic v1.2.1 // indirect
go.etcd.io/etcd/api/v3 v3.5.9 // indirect
go.etcd.io/etcd/client/pkg/v3 v3.5.9 // indirect
@@ -88,15 +99,21 @@ require (
go.uber.org/multierr v1.9.0 // indirect
go.uber.org/zap v1.24.0 // indirect
golang.org/x/arch v0.3.0 // indirect
- golang.org/x/crypto v0.9.0 // indirect
+ golang.org/x/crypto v0.22.0 // indirect
golang.org/x/exp v0.0.0-20230519143937-03e91628a987 // indirect
- golang.org/x/net v0.10.0 // indirect
- golang.org/x/sync v0.1.0 // indirect
- golang.org/x/sys v0.8.0 // indirect
- golang.org/x/text v0.9.0 // indirect
- google.golang.org/genproto v0.0.0-20230526161137-0005af68ea54 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 // indirect
+ golang.org/x/image v0.15.0 // indirect
+ golang.org/x/mod v0.17.0 // indirect
+ golang.org/x/net v0.24.0 // indirect
+ golang.org/x/pkgsite v0.0.0-20240405142909-b8abe0819782 // indirect
+ golang.org/x/sync v0.7.0 // indirect
+ golang.org/x/sys v0.19.0 // indirect
+ golang.org/x/text v0.14.0 // indirect
+ golang.org/x/tools v0.20.0 // indirect
+ golang.org/x/tools/go/pointer v0.1.0-deprecated // indirect
+ google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/blake3 v1.1.7 // indirect
+ rsc.io/markdown v0.0.0-20231214224604-88bb533a6020 // indirect
)
diff --git a/go.sum b/go.sum
index 649fa14..be88f69 100644
--- a/go.sum
+++ b/go.sum
@@ -2,8 +2,6 @@ github.com/antonfisher/nested-logrus-formatter v1.3.1 h1:NFJIr+pzwv5QLHTPyKz9UME
github.com/antonfisher/nested-logrus-formatter v1.3.1/go.mod h1:6WTfyWFkBc9+zyBaKIqRrg/KwMqBbodBjgbHjDz7zjA=
github.com/baohan10/reedsolomon v0.0.0-20230406042632-43574cac9fa7 h1:wcvD6enR///dFvb9cRodx5SGbPH4G4jPjw+aVIWkAKE=
github.com/baohan10/reedsolomon v0.0.0-20230406042632-43574cac9fa7/go.mod h1:rAxMF6pVaFK/s6T4gGczvloccNbtwzuYaP2Y7W6flE8=
-github.com/beevik/etree v1.2.0 h1:l7WETslUG/T+xOPs47dtd6jov2Ii/8/OjCldk5fYfQw=
-github.com/beevik/etree v1.2.0/go.mod h1:aiPf89g/1k3AShMVAzriilpcE4R/Vuor90y83zVZWFc=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
@@ -27,14 +25,16 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc=
+github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
+github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
+github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
+github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
-github.com/go-ping/ping v1.1.0 h1:3MCGhVX4fyEUuhsfwPrsEdQw6xspHkv5zHsiSoDFZYw=
-github.com/go-ping/ping v1.1.0/go.mod h1:xIFjORFzTxqIV/tDVGO4eDy/bLuSyawEeojSm3GfRGk=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
@@ -45,11 +45,15 @@ github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QX
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI=
github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
+github.com/goccy/go-graphviz v0.1.2 h1:sWSJ6w13BCm/ZOUTHDVrdvbsxqN8yyzaFcHrH/hQ9Yg=
+github.com/goccy/go-graphviz v0.1.2/go.mod h1:pMYpbAqJT10V8dzV1JN/g/wUlG/0imKPzn3ZsrchGCI=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
@@ -57,12 +61,14 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/licensecheck v0.3.1 h1:QoxgoDkaeC4nFrtGN1jV7IPmDCHFNIVh54e5hSt6sPs=
+github.com/google/licensecheck v0.3.1/go.mod h1:ORkR35t/JjW+emNKtfJDII0zlciG9JgbT7SmsohlHmY=
+github.com/google/safehtml v0.0.3-0.20211026203422-d6f0e11a5516 h1:pSEdbeokt55L2hwtWo6A2k7u5SG08rmw0LhWEyrdWgk=
+github.com/google/safehtml v0.0.3-0.20211026203422-d6f0e11a5516/go.mod h1:L4KWwDsUJdECRAEpZoBn3O64bQaywRscowZjJAzjHnU=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
-github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -78,6 +84,8 @@ github.com/ipfs/go-ipfs-api v0.7.0 h1:CMBNCUl0b45coC+lQCXEVpMhwoqjiaCwUIrM+coYW2
github.com/ipfs/go-ipfs-api v0.7.0/go.mod h1:AIxsTNB0+ZhkqIfTZpdZ0VR/cpX5zrXjATa3prSay3g=
github.com/jedib0t/go-pretty/v6 v6.4.7 h1:lwiTJr1DEkAgzljsUsORmWsVn5MQjt1BPJdPCtJ6KXE=
github.com/jedib0t/go-pretty/v6 v6.4.7/go.mod h1:Ndk3ase2CkQbXLLNf5QDHoYb6J9WtVfmHZu9n8rk2xs=
+github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
+github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@@ -96,6 +104,7 @@ github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8=
github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg=
github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM=
@@ -104,13 +113,17 @@ github.com/libp2p/go-libp2p v0.26.3 h1:6g/psubqwdaBqNNoidbRKSTBEYgaOuKBhHl8Q5tO+
github.com/libp2p/go-libp2p v0.26.3/go.mod h1:x75BN32YbwuY0Awm2Uix4d4KOz+/4piInkp4Wr3yOo8=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
-github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ=
github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -140,17 +153,21 @@ github.com/multiformats/go-multistream v0.4.1 h1:rFy0Iiyn3YT0asivDUIR05leAdwZq3d
github.com/multiformats/go-multistream v0.4.1/go.mod h1:Mz5eykRVAjJWckE2U78c6xqdtyNUEhKSM0Lwar2p77Q=
github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8=
github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU=
-github.com/otiai10/copy v1.12.0 h1:cLMgSQnXBs1eehF0Wy/FAGsgDTDmAqFR7rQylBb1nDY=
-github.com/otiai10/copy v1.12.0/go.mod h1:rSaLseMUsZFFbsFGc7wCJnnkTAvdc5L6VWxPE4308Ww=
-github.com/otiai10/mint v1.5.1 h1:XaPLeE+9vGbuyEHem1JNk3bYc7KKqyI/na0/mLd/Kks=
+github.com/ofabry/go-callvis v0.7.0 h1:kh8TYgER49uZDlMrYviHchBs+I4n/SgiZXv45CVkqiE=
+github.com/ofabry/go-callvis v0.7.0/go.mod h1:z/1SpfLX72BjG8mgjy77/VWK5xJ9YBytCBnQeQnRObQ=
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
+github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
+github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481 h1:jMxcLa+VjJKhpCwbLUXAD15wJ+hhvXMLujCl3MkXpfM=
+github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s=
github.com/samber/lo v1.38.1 h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM=
github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y=
@@ -177,10 +194,16 @@ github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/uber/go-torch v0.0.0-20181107071353-86f327cc820e h1:jV0Y58RWaOMT3i5foW2YoEKlaN6biewBtngFwAfEwQ0=
+github.com/uber/go-torch v0.0.0-20181107071353-86f327cc820e/go.mod h1:uuMPbyv6WJykZcarrIuJiTjfSGC997/jnfHyyeeG2Jo=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U=
+github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/zyedidia/generic v1.2.1 h1:Zv5KS/N2m0XZZiuLS82qheRG4X1o5gsWreGb0hR7XDc=
github.com/zyedidia/generic v1.2.1/go.mod h1:ly2RBz4mnz1yeuVbQA/VFwGjK3mnHGRj1JuoG336Bis=
go.etcd.io/etcd/api/v3 v3.5.9 h1:4wSsluwyTbGGmyjJktOf3wFQoTBIURXHnq9n/G/JQHs=
@@ -202,55 +225,67 @@ golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
-golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
+golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
+golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/exp v0.0.0-20230519143937-03e91628a987 h1:3xJIFvzUFbu4ls0BTBYcgbCGhA63eAOEMxIHugyXJqA=
golang.org/x/exp v0.0.0-20230519143937-03e91628a987/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w=
+golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
+golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
-golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
-golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
+golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
+golang.org/x/pkgsite v0.0.0-20240405142909-b8abe0819782 h1:LpBNDVFgFjnIZg+JzqKB2rSZCwV5o0NaYRZyAHBy8oI=
+golang.org/x/pkgsite v0.0.0-20240405142909-b8abe0819782/go.mod h1:LvGpGBkKBoQCkJOxRtjQEMJRvNMpoKcMjSzg3pjgPOw=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
-golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
-golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
+golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
-golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY=
+golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg=
+golang.org/x/tools/go/pointer v0.1.0-deprecated h1:PwCkqv2FT35Z4MVxR/tUlvLoL0TkxDjShpBrE4p18Ho=
+golang.org/x/tools/go/pointer v0.1.0-deprecated/go.mod h1:Jd+I2inNruJ+5VRdS+jU4S1t17z5y+UCCRa/eBRwilA=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto v0.0.0-20230526161137-0005af68ea54 h1:9NWlQfY2ePejTmfwUH1OWwmznFa+0kKcHGPDvcPza9M=
google.golang.org/genproto v0.0.0-20230526161137-0005af68ea54/go.mod h1:zqTuNwFlFRsw5zIts5VnzLQxSRqh+CGOTVMlYbY0Eyk=
+google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc h1:8DyZCyvI8mE1IdLy/60bS+52xfymkE72wv1asokgtao=
+google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64=
google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9 h1:m8v1xLLLzMe1m5P+gCTF8nJB9epwZQUBERm20Oy1poQ=
google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig=
+google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc h1:kVKPf/IiYSBWEWtkIn6wZXwWGCnLKcC8oWfZvXjsGnM=
+google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 h1:0nDDozoAU19Qb2HwhXadU8OcsiO/09cnTqhUtq2MEOM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw=
google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
@@ -265,4 +300,6 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
lukechampine.com/blake3 v1.1.7 h1:GgRMhmdsuK8+ii6UZFDL8Nb+VyMwadAgcJyfYHxG6n0=
lukechampine.com/blake3 v1.1.7/go.mod h1:tkKEOtDkNtklkXtLNEOGNq5tcV90tJiA1vAA12R78LA=
+rsc.io/markdown v0.0.0-20231214224604-88bb533a6020 h1:GqQcl3Kno/rOntek8/d8axYjau8r/c1zVFojXS6WJFI=
+rsc.io/markdown v0.0.0-20231214224604-88bb533a6020/go.mod h1:8xcPgWmwlZONN1D9bjxtHEjrUtSEa3fakVF8iaewYKQ=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
diff --git a/log/agent.log b/log/agent.log
new file mode 100644
index 0000000..38ead43
--- /dev/null
+++ b/log/agent.log
@@ -0,0 +1,870 @@
+2024-04-10 12:36:23 [DEBU] [:Collector] do testing
+2024-04-10 12:36:23 [INFO] [:Collector] start connectivity reporter
+2024-04-10 12:36:25 [WARN] [:Collector] [NodeID:1] pre ping: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp [::1]:5010: connectex: No connection could be made because the target machine actively refused it."
+2024-04-10 12:36:25 [INFO] start serving distlock
+2024-04-10 12:36:25 [INFO] start serving command server
+2024-04-10 12:36:25 [INFO] start serving grpc
+2024-04-10 12:37:08 [DEBU] [:Collector] do testing
+2024-04-10 12:42:11 [DEBU] [:Collector] do testing
+2024-04-10 12:47:11 [DEBU] [:Collector] do testing
+2024-04-10 13:04:14 [DEBU] [:Collector] do testing
+2024-04-10 13:04:14 [WARN] agent server err: deserialize error: channel is closed
+2024-04-10 13:04:14 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-10 13:04:14 [INFO] command server stopped
+2024-04-10 13:07:11 [DEBU] [:Collector] do testing
+2024-04-10 13:12:11 [DEBU] [:Collector] do testing
+2024-04-10 13:17:11 [DEBU] [:Collector] do testing
+2024-04-10 13:22:11 [DEBU] [:Collector] do testing
+2024-04-10 13:27:11 [DEBU] [:Collector] do testing
+2024-04-10 13:32:11 [DEBU] [:Collector] do testing
+2024-04-10 14:01:30 [DEBU] [:Collector] do testing
+2024-04-10 14:02:11 [DEBU] [:Collector] do testing
+2024-04-10 14:07:11 [DEBU] [:Collector] do testing
+2024-04-10 14:12:11 [DEBU] [:Collector] do testing
+2024-04-10 14:17:11 [DEBU] [:Collector] do testing
+2024-04-10 14:22:11 [DEBU] [:Collector] do testing
+2024-04-10 14:27:11 [DEBU] [:Collector] do testing
+2024-04-10 14:32:11 [DEBU] [:Collector] do testing
+2024-04-10 14:37:11 [DEBU] [:Collector] do testing
+2024-04-10 14:40:52 [DEBU] [:Collector] do testing
+2024-04-10 14:40:52 [INFO] [:Collector] start connectivity reporter
+2024-04-10 14:41:13 [DEBU] [:Collector] do testing
+2024-04-10 14:41:37 [INFO] start serving command server
+2024-04-10 14:41:37 [INFO] start serving distlock
+2024-04-10 14:41:37 [INFO] start serving grpc
+2024-04-10 14:41:58 [DEBU] [:Collector] do testing
+2024-04-10 14:46:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 14:46:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 14:46:58 [DEBU] [:Collector] do testing
+2024-04-10 14:51:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 14:51:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 14:51:58 [DEBU] [:Collector] do testing
+2024-04-10 14:56:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 14:56:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 14:56:58 [DEBU] [:Collector] do testing
+2024-04-10 15:01:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:01:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:01:58 [DEBU] [:Collector] do testing
+2024-04-10 15:06:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:06:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:06:58 [DEBU] [:Collector] do testing
+2024-04-10 15:11:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:11:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:11:58 [DEBU] [:Collector] do testing
+2024-04-10 15:16:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:16:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:16:58 [DEBU] [:Collector] do testing
+2024-04-10 15:21:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:21:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:21:58 [DEBU] [:Collector] do testing
+2024-04-10 15:26:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:26:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:26:58 [DEBU] [:Collector] do testing
+2024-04-10 15:31:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:31:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:31:58 [DEBU] [:Collector] do testing
+2024-04-10 15:36:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:36:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:36:58 [DEBU] [:Collector] do testing
+2024-04-10 15:41:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:41:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:41:58 [DEBU] [:Collector] do testing
+2024-04-10 15:46:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:46:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:46:58 [DEBU] [:Collector] do testing
+2024-04-10 15:51:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:51:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:51:58 [DEBU] [:Collector] do testing
+2024-04-10 15:56:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 15:56:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 15:56:58 [DEBU] [:Collector] do testing
+2024-04-10 16:01:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:01:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:01:58 [DEBU] [:Collector] do testing
+2024-04-10 16:06:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:06:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:06:58 [DEBU] [:Collector] do testing
+2024-04-10 16:11:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:11:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:11:58 [DEBU] [:Collector] do testing
+2024-04-10 16:15:38 [DEBU] [:Collector] do testing
+2024-04-10 16:15:38 [INFO] [:Collector] start connectivity reporter
+2024-04-10 16:15:40 [WARN] [:Collector] [NodeID:1] pre ping: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp [::1]:5010: connectex: No connection could be made because the target machine actively refused it."
+2024-04-10 16:15:40 [INFO] start serving command server
+2024-04-10 16:15:40 [INFO] start serving distlock
+2024-04-10 16:15:40 [INFO] start serving grpc
+2024-04-10 16:16:26 [DEBU] [:Collector] do testing
+2024-04-10 16:16:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:16:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:21:29 [DEBU] [:Collector] do testing
+2024-04-10 16:21:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:21:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:26:29 [DEBU] [:Collector] do testing
+2024-04-10 16:26:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:26:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:31:29 [DEBU] [:Collector] do testing
+2024-04-10 16:31:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:31:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:36:29 [DEBU] [:Collector] do testing
+2024-04-10 16:36:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:36:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:41:29 [DEBU] [:Collector] do testing
+2024-04-10 16:41:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:41:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:46:29 [DEBU] [:Collector] do testing
+2024-04-10 16:46:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:46:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:51:29 [DEBU] [:Collector] do testing
+2024-04-10 16:51:31 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-10 16:51:31 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-10 16:59:00 [WARN] agent server err: deserialize error: channel is closed
+2024-04-10 16:59:00 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-10 16:59:00 [INFO] command server stopped
+2024-04-10 16:59:00 [DEBU] [:Collector] do testing
+2024-04-10 17:01:29 [DEBU] [:Collector] do testing
+2024-04-10 17:06:29 [DEBU] [:Collector] do testing
+2024-04-10 17:11:29 [DEBU] [:Collector] do testing
+2024-04-10 17:16:29 [DEBU] [:Collector] do testing
+2024-04-10 17:21:29 [DEBU] [:Collector] do testing
+2024-04-10 17:26:29 [DEBU] [:Collector] do testing
+2024-04-10 20:05:49 [DEBU] [:Collector] do testing
+2024-04-10 20:06:29 [DEBU] [:Collector] do testing
+2024-04-10 20:11:29 [DEBU] [:Collector] do testing
+2024-04-10 20:16:29 [DEBU] [:Collector] do testing
+2024-04-10 20:25:54 [DEBU] [:Collector] do testing
+2024-04-10 20:26:29 [DEBU] [:Collector] do testing
+2024-04-10 20:32:03 [DEBU] [:Collector] do testing
+2024-04-10 20:36:29 [DEBU] [:Collector] do testing
+2024-04-10 20:41:29 [DEBU] [:Collector] do testing
+2024-04-10 20:46:29 [DEBU] [:Collector] do testing
+2024-04-10 20:51:29 [DEBU] [:Collector] do testing
+2024-04-10 20:56:29 [DEBU] [:Collector] do testing
+2024-04-10 21:01:29 [DEBU] [:Collector] do testing
+2024-04-10 21:06:29 [DEBU] [:Collector] do testing
+2024-04-10 21:11:29 [DEBU] [:Collector] do testing
+2024-04-10 21:16:29 [DEBU] [:Collector] do testing
+2024-04-10 21:21:29 [DEBU] [:Collector] do testing
+2024-04-10 21:26:29 [DEBU] [:Collector] do testing
+2024-04-10 21:50:58 [DEBU] [:Collector] do testing
+2024-04-10 21:51:29 [DEBU] [:Collector] do testing
+2024-04-11 00:53:07 [DEBU] [:Collector] do testing
+2024-04-11 08:52:58 [DEBU] [:Collector] do testing
+2024-04-11 08:56:29 [DEBU] [:Collector] do testing
+2024-04-11 09:01:29 [DEBU] [:Collector] do testing
+2024-04-11 09:06:29 [DEBU] [:Collector] do testing
+2024-04-11 09:11:29 [DEBU] [:Collector] do testing
+2024-04-11 09:16:29 [DEBU] [:Collector] do testing
+2024-04-11 09:22:59 [DEBU] [:Collector] do testing
+2024-04-11 09:22:59 [INFO] [:Collector] start connectivity reporter
+2024-04-11 09:23:01 [WARN] [:Collector] [NodeID:1] pre ping: rpc error: code = Unavailable desc = connection error: desc = "transport: Error while dialing: dial tcp [::1]:5010: connectex: No connection could be made because the target machine actively refused it."
+2024-04-11 09:23:02 [INFO] start serving command server
+2024-04-11 09:23:02 [INFO] start serving distlock
+2024-04-11 09:23:02 [INFO] start serving grpc
+2024-04-11 09:27:56 [DEBU] [:Collector] do testing
+2024-04-11 09:28:04 [DEBU] client upload file
+2024-04-11 09:28:04 [DEBU] 106 bytes received
+2024-04-11 09:28:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:28:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:32:59 [DEBU] [:Collector] do testing
+2024-04-11 09:33:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:33:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:37:59 [DEBU] [:Collector] do testing
+2024-04-11 09:38:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:38:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:42:59 [DEBU] [:Collector] do testing
+2024-04-11 09:43:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:43:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:47:59 [DEBU] [:Collector] do testing
+2024-04-11 09:48:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:48:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:52:59 [DEBU] [:Collector] do testing
+2024-04-11 09:53:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:53:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 09:57:59 [DEBU] [:Collector] do testing
+2024-04-11 09:58:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 09:58:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:02:59 [DEBU] [:Collector] do testing
+2024-04-11 10:03:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:03:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:07:59 [DEBU] [:Collector] do testing
+2024-04-11 10:08:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:08:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:12:59 [DEBU] [:Collector] do testing
+2024-04-11 10:13:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:13:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:17:59 [DEBU] [:Collector] do testing
+2024-04-11 10:18:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:18:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:22:59 [DEBU] [:Collector] do testing
+2024-04-11 10:23:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:23:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:27:59 [DEBU] [:Collector] do testing
+2024-04-11 10:28:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:28:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:32:59 [DEBU] [:Collector] do testing
+2024-04-11 10:33:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:33:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:37:59 [DEBU] [:Collector] do testing
+2024-04-11 10:38:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:38:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:42:59 [DEBU] [:Collector] do testing
+2024-04-11 10:43:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:43:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:47:59 [DEBU] [:Collector] do testing
+2024-04-11 10:48:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:48:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:52:59 [DEBU] [:Collector] do testing
+2024-04-11 10:53:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:53:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 10:57:59 [DEBU] [:Collector] do testing
+2024-04-11 10:58:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 10:58:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:02:59 [DEBU] [:Collector] do testing
+2024-04-11 11:03:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:03:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:07:59 [DEBU] [:Collector] do testing
+2024-04-11 11:08:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:08:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:12:59 [DEBU] [:Collector] do testing
+2024-04-11 11:13:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:13:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:17:59 [DEBU] [:Collector] do testing
+2024-04-11 11:18:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:18:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:22:59 [DEBU] [:Collector] do testing
+2024-04-11 11:23:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:23:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:27:59 [DEBU] [:Collector] do testing
+2024-04-11 11:28:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:28:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:32:59 [DEBU] [:Collector] do testing
+2024-04-11 11:33:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:33:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:37:59 [DEBU] [:Collector] do testing
+2024-04-11 11:38:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:38:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:42:59 [DEBU] [:Collector] do testing
+2024-04-11 11:43:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:43:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:47:59 [DEBU] [:Collector] do testing
+2024-04-11 11:48:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:48:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:52:59 [DEBU] [:Collector] do testing
+2024-04-11 11:53:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:53:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 11:57:59 [DEBU] [:Collector] do testing
+2024-04-11 11:58:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 11:58:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 12:02:59 [DEBU] [:Collector] do testing
+2024-04-11 12:03:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 12:03:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 12:07:59 [DEBU] [:Collector] do testing
+2024-04-11 12:08:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 12:08:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 12:12:59 [DEBU] [:Collector] do testing
+2024-04-11 12:13:12 [WARN] parsing user id $RECYCLE.BIN: strconv.ParseInt: parsing "$RECYCLE.BIN": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id AppGallery: strconv.ParseInt: parsing "AppGallery": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id BrowerDownload: strconv.ParseInt: parsing "BrowerDownload": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id Config.Msi: strconv.ParseInt: parsing "Config.Msi": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id Huawei Share: strconv.ParseInt: parsing "Huawei Share": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id Others: strconv.ParseInt: parsing "Others": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id Soft: strconv.ParseInt: parsing "Soft": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id SoftPackage: strconv.ParseInt: parsing "SoftPackage": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id System Volume Information: strconv.ParseInt: parsing "System Volume Information": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id Work: strconv.ParseInt: parsing "Work": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id draw-graph: strconv.ParseInt: parsing "draw-graph": invalid syntax
+2024-04-11 12:13:12 [WARN] parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
+2024-04-11 12:19:52 [DEBU] [:Collector] do testing
+2024-04-11 12:19:52 [WARN] agent server err: deserialize error: channel is closed
+2024-04-11 12:19:52 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-11 12:19:52 [INFO] command server stopped
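Note on the agent.log excerpt above: the same strconv.ParseInt warning repeats on every collection pass for each non-numeric folder name ($RECYCLE.BIN, AppGallery, tmp, ...), which suggests the collector enumerates the storage root and tries to interpret every entry name as a numeric user ID. The snippet below is only an illustrative sketch of that pattern, not the repository's actual code; the directory layout and the collectUserIDs helper are assumptions.

// Illustrative only: enumerating a storage root and parsing every entry name
// as a user ID produces the repeated warnings seen in agent.log for
// non-numeric folders such as "$RECYCLE.BIN" or "tmp".
package main

import (
	"fmt"
	"os"
	"strconv"
)

// collectUserIDs is a hypothetical helper: it treats each top-level entry of
// storageRoot as a user directory and keeps only names that parse as int64.
func collectUserIDs(storageRoot string) ([]int64, error) {
	entries, err := os.ReadDir(storageRoot)
	if err != nil {
		return nil, err
	}

	var ids []int64
	for _, e := range entries {
		if !e.IsDir() {
			continue
		}
		id, err := strconv.ParseInt(e.Name(), 10, 64)
		if err != nil {
			// Non-numeric names fall through here, e.g.
			// parsing user id tmp: strconv.ParseInt: parsing "tmp": invalid syntax
			fmt.Printf("[WARN] parsing user id %s: %v\n", e.Name(), err)
			continue
		}
		ids = append(ids, id)
	}
	return ids, nil
}

func main() {
	ids, err := collectUserIDs(".")
	if err != nil {
		fmt.Println("read dir:", err)
		return
	}
	fmt.Println("numeric user dirs:", ids)
}

A directory-name filter (or skipping well-known system folders) before calling strconv.ParseInt would silence this class of warning without changing behaviour for numeric user directories.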
diff --git a/log/coordinator.log b/log/coordinator.log
new file mode 100644
index 0000000..3250035
--- /dev/null
+++ b/log/coordinator.log
@@ -0,0 +1,17 @@
+2024-04-10 12:34:16 [INFO] start serving command server
+2024-04-10 13:04:14 [WARN] coordinator server err: deserialize error: channel is closed
+2024-04-10 13:04:14 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-10 13:04:14 [INFO] command server stopped
+2024-04-10 14:41:25 [INFO] start serving command server
+2024-04-10 16:59:00 [WARN] coordinator server err: deserialize error: channel is closed
+2024-04-10 16:59:00 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-10 16:59:00 [INFO] command server stopped
+2024-04-10 17:06:56 [INFO] start serving command server
+2024-04-10 17:07:36 [INFO] start serving command server
+2024-04-10 20:05:49 [WARN] coordinator server err: deserialize error: channel is closed
+2024-04-10 20:05:49 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-10 20:05:49 [INFO] command server stopped
+2024-04-11 09:22:23 [INFO] start serving command server
+2024-04-11 12:19:52 [WARN] coordinator server err: deserialize error: channel is closed
+2024-04-11 12:19:52 [ERRO] command server stopped with error: receive message error: channel is closed
+2024-04-11 12:19:52 [INFO] command server stopped
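Note on the shutdown pattern above: every stop in coordinator.log (and in agent.log/scanner.log) emits the same WARN/ERRO pair, "deserialize error: channel is closed" followed by "receive message error: channel is closed", which indicates a deliberately closed delivery channel being reported as a failure. The sketch below is a generic, assumed illustration of one way to tell an intentional Close() apart from a real receive error; the Server type and its fields are not the repository's actual API.

// Illustrative only: distinguish a deliberate shutdown from a genuine
// receive failure so a normal stop does not surface as WARN + ERRO.
package main

import (
	"fmt"
	"sync/atomic"
)

type Server struct {
	deliveries chan []byte // closed by Close() to stop the serve loop
	closing    atomic.Bool
}

func NewServer() *Server {
	return &Server{deliveries: make(chan []byte, 16)}
}

// Serve drains deliveries until the channel is closed. A close requested via
// Close() is reported as a clean stop instead of an error.
func (s *Server) Serve() error {
	for msg := range s.deliveries {
		fmt.Printf("handle %d bytes\n", len(msg))
	}
	if s.closing.Load() {
		return nil // deliberate shutdown, no error to log
	}
	return fmt.Errorf("receive message error: channel is closed")
}

// Close marks the shutdown as intentional before closing the channel.
func (s *Server) Close() {
	s.closing.Store(true)
	close(s.deliveries)
}

func main() {
	srv := NewServer()
	go func() {
		srv.deliveries <- []byte("ping")
		srv.Close()
	}()
	if err := srv.Serve(); err != nil {
		fmt.Println("[ERRO] command server stopped with error:", err)
		return
	}
	fmt.Println("[INFO] command server stopped")
}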
diff --git a/log/scanner.log b/log/scanner.log
new file mode 100644
index 0000000..f071888
--- /dev/null
+++ b/log/scanner.log
@@ -0,0 +1,2659 @@
+2024-04-10 12:44:28 [INFO] start serving distlock
+2024-04-10 12:44:28 [INFO] start serving event executor
+2024-04-10 12:44:28 [INFO] start serving scanner server
+2024-04-10 13:04:14 [WARN] agent server err: deserialize error: channel is closed
+2024-04-10 13:04:14 [ERRO] scanner server stopped with error: receive message error: channel is closed
+2024-04-10 13:04:14 [INFO] scanner server stopped
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:04:14 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:04:14 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:04:14 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:04:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:04:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:05:14 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:05:14 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0530607s
+2024-04-10 13:05:14 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:05:44 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:05:44 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:05:44 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:06:14 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:06:14 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:06:14 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:06:14 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:06:14 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:06:14 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:07:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:07:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:07:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:07:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:07:14 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:07:14 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0069554s
+2024-04-10 13:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:09:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:09:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:09:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:09:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:09:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:10:21 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:10:21 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:10:21 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:10:21 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:10:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:11:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:11:26 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0084534s
+2024-04-10 13:12:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:12:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:12:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:12:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:14:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:14:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:14:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:14:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:14:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:15:21 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:15:21 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:15:21 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:15:21 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:15:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:16:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:16:26 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0124099s
+2024-04-10 13:17:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:17:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:17:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:17:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:19:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:19:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:19:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:19:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:19:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:20:21 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:20:21 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:20:21 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:20:21 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:20:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:21:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:21:26 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0053361s
+2024-04-10 13:22:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:22:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:22:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:22:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:24:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:24:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:24:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:24:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:24:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:25:21 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:25:21 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:25:21 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:25:21 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:25:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:26:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:26:26 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0094709s
+2024-04-10 13:27:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:27:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:27:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:27:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 13:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 13:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 13:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 13:29:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 13:29:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 13:29:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 13:29:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 13:29:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 13:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 13:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 13:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 13:30:21 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: wait response timeout
+2024-04-10 13:30:21 [DEBU] [Event:AgentCheckState] end
+2024-04-10 13:30:21 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 13:30:21 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 13:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 13:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 13:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 13:30:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 13:31:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: wait response timeout
+2024-04-10 13:31:26 [DEBU] [Event:AgentCheckCache] end, time: 1m0.0041807s
+2024-04-10 13:32:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 13:32:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 13:32:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 13:32:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:01:30 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:01:30 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:01:30 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:01:30 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:01:30 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:01:30 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:01:30 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:01:30 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:02:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:02:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:02:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:02:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:04:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:04:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:04:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:04:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:04:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:04:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:04:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:04:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:04:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:04:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:04:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:05:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:05:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:05:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:05:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:07:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:07:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:07:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:07:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:09:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:09:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:09:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:09:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:09:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:09:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:09:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:09:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:09:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:09:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:09:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:10:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:10:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:10:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:10:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:12:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:12:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:12:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:12:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:14:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:14:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:14:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:14:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:14:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:14:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:14:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:14:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:14:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:14:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:14:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:15:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:15:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:15:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:15:26 [DEBU] [Event:AgentCheckCache] end, time: 505.9µs
+2024-04-10 14:17:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:17:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:17:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:17:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:19:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:19:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:19:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:19:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:19:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:19:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:19:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:19:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:19:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:19:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:19:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:20:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:20:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:20:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:20:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:22:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:22:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:22:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:22:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:24:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:24:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:24:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:24:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:24:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:24:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:24:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:24:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:24:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:24:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:24:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:25:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:25:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:25:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:25:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:27:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:27:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:27:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:27:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:29:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:29:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:29:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:29:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:29:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:29:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:29:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:29:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:29:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:29:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:29:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:30:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:30:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:30:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:30:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:32:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:32:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:32:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:32:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:34:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:34:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:34:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:34:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:34:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:34:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:34:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:34:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:34:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:34:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:34:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:34:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:34:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:34:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:34:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:35:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:35:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:35:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:35:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:35:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:35:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:37:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:37:01 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:37:03 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:37:03 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:39:48 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:39:48 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:39:48 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:39:48 [DEBU] [Event:CheckPackage] begin with , PackageIDs:
+2024-04-10 14:39:48 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:39:51 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:39:51 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:39:51 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:39:51 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:39:51 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:39:55 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:39:55 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:39:55 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:39:55 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:39:55 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:40:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:40:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:40:26 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:40:26 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:40:26 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 14:40:26 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 14:40:56 [INFO] start serving distlock
+2024-04-10 14:40:56 [INFO] start serving event executor
+2024-04-10 14:40:56 [INFO] start serving scanner server
+2024-04-10 14:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:45:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:45:59 [DEBU] [Event:AgentCheckCache] end, time: 59.1614ms
+2024-04-10 14:46:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:46:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:46:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:46:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:46:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:46:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:46:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 14:46:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:49:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:49:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:50:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:50:59 [DEBU] [Event:AgentCheckCache] end, time: 12.2963ms
+2024-04-10 14:51:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:51:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:51:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:51:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:51:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:51:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:51:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 14:51:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:54:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:54:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 14:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 14:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 14:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 14:55:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 14:55:59 [DEBU] [Event:AgentCheckCache] end, time: 13.6984ms
+2024-04-10 14:56:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 14:56:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 14:56:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 14:56:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 14:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 14:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 14:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 14:56:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 14:56:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 14:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 14:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 14:56:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 14:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 14:56:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 14:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 14:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 14:59:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 14:59:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:00:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:00:59 [DEBU] [Event:AgentCheckCache] end, time: 10.9303ms
+2024-04-10 15:01:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:01:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:01:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:01:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:01:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:01:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:01:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:01:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:04:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:04:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:05:59 [DEBU] [Event:AgentCheckCache] end, time: 16.2387ms
+2024-04-10 15:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:10:59 [DEBU] [Event:AgentCheckCache] end, time: 16.346ms
+2024-04-10 15:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:15:59 [DEBU] [Event:AgentCheckCache] end, time: 9.4523ms
+2024-04-10 15:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:20:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:20:59 [DEBU] [Event:AgentCheckCache] end, time: 10.5391ms
+2024-04-10 15:21:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:21:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:21:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:21:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:21:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:21:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:21:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:21:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:24:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:24:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:25:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:25:59 [DEBU] [Event:AgentCheckCache] end, time: 6.7143ms
+2024-04-10 15:26:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:26:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:26:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:26:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:26:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:26:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:26:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:26:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:29:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:29:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:30:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:30:59 [DEBU] [Event:AgentCheckCache] end, time: 6.3131ms
+2024-04-10 15:31:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:31:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:31:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:31:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:31:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:31:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:31:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:31:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:34:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:34:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:35:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:35:59 [DEBU] [Event:AgentCheckCache] end, time: 6.5453ms
+2024-04-10 15:36:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:36:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:36:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:36:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:36:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:36:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:36:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:36:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:39:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:39:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:40:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:40:59 [DEBU] [Event:AgentCheckCache] end, time: 7.2991ms
+2024-04-10 15:41:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:41:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:41:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:41:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:41:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:41:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:41:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:41:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:44:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:44:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:45:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:45:59 [DEBU] [Event:AgentCheckCache] end, time: 7.1274ms
+2024-04-10 15:46:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:46:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:46:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:46:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:46:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:46:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:46:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:46:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:49:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:49:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:50:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:50:59 [DEBU] [Event:AgentCheckCache] end, time: 4.0393ms
+2024-04-10 15:51:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:51:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:51:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:51:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:51:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:51:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:51:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:51:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:54:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:54:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 15:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 15:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 15:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 15:55:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 15:55:59 [DEBU] [Event:AgentCheckCache] end, time: 3.7358ms
+2024-04-10 15:56:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 15:56:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 15:56:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 15:56:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 15:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 15:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 15:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 15:56:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 15:56:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 15:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 15:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 15:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 15:56:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 15:56:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 15:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 15:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 15:59:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 15:59:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:00:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:00:59 [DEBU] [Event:AgentCheckCache] end, time: 3.5814ms
+2024-04-10 16:01:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:01:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:01:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:01:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:01:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:01:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:01:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:01:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:04:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:04:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:05:59 [DEBU] [Event:AgentCheckCache] end, time: 4.0942ms
+2024-04-10 16:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:10:59 [DEBU] [Event:AgentCheckCache] end, time: 8.7481ms
+2024-04-10 16:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:15:59 [DEBU] [Event:AgentCheckCache] end, time: 4.5685ms
+2024-04-10 16:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:20:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:20:59 [DEBU] [Event:AgentCheckCache] end, time: 6.4745ms
+2024-04-10 16:21:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:21:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:21:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:21:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:21:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:21:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:21:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:21:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:24:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:24:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:25:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:25:59 [DEBU] [Event:AgentCheckCache] end, time: 6.253ms
+2024-04-10 16:26:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:26:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:26:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:26:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:26:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:26:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:26:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:26:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:29:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:29:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:30:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:30:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:30:59 [DEBU] [Event:AgentCheckCache] end, time: 3.6166ms
+2024-04-10 16:31:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:31:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:31:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:31:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:31:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:31:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:31:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:31:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:31:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:31:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:34:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:34:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:35:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:35:59 [DEBU] [Event:AgentCheckCache] end, time: 6.7347ms
+2024-04-10 16:36:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:36:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:36:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:36:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:36:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:36:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:36:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:36:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:39:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:39:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:40:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:40:59 [DEBU] [Event:AgentCheckCache] end, time: 5.8023ms
+2024-04-10 16:41:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:41:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:41:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:41:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:41:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:41:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:41:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:41:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:44:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:44:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:45:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:45:59 [DEBU] [Event:AgentCheckCache] end, time: 3.933ms
+2024-04-10 16:46:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:46:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:46:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:46:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:46:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:46:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:46:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:46:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:49:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:49:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:50:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:50:59 [DEBU] [Event:AgentCheckCache] end, time: 7.4485ms
+2024-04-10 16:51:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:51:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:51:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:51:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:51:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:51:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:51:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:51:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:54:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:54:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 16:59:00 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 16:59:00 [WARN] agent server err: deserialize error: channel is closed
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 16:59:00 [ERRO] scanner server stopped with error: receive message error: channel is closed
+2024-04-10 16:59:00 [INFO] scanner server stopped
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 16:59:00 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 16:59:00 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 16:59:00 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckState] end
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 16:59:00 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 16:59:00 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 16:59:00 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 16:59:00 [DEBU] [Event:CheckPackage] end
+2024-04-10 16:59:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 16:59:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:00:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:00:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:00:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:01:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:01:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:01:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:01:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:01:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:01:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:01:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:01:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:01:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 17:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 17:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 17:04:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 17:04:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:05:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:05:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:06:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 17:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 17:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 17:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 17:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:10:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:10:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:11:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 17:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 17:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 17:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 17:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:15:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:15:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:16:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 17:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 17:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 17:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 17:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:20:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:20:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:20:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:21:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:21:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:21:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:21:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:21:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:21:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:21:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:21:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:21:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 17:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 17:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 17:24:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 17:24:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 17:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 17:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 17:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 17:25:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 17:25:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:25:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 17:26:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 17:26:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 17:26:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 17:26:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 17:26:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 17:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 17:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 17:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 17:26:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 17:26:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 17:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 17:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 17:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 17:26:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 17:26:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:05:49 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:05:49 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:05:49 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:05:49 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:05:49 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:05:49 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:05:49 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:05:49 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:05:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:05:59 [DEBU] [Event:AgentCheckCache] end, time: 527.8µs
+2024-04-10 20:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:06:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:10:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:10:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 20:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:11:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:15:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:15:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 20:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:16:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:25:54 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:25:54 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:25:54 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:25:54 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:25:54 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:25:54 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:25:54 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:25:54 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:25:55 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:25:55 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:25:55 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:25:55 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:25:55 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:25:55 [DEBU] [Event:AgentCheckCache] end, time: 716.5µs
+2024-04-10 20:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:25:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:25:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:25:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 20:26:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:26:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:26:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:26:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:26:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:26:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:26:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:26:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:26:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:28:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:29:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:29:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:32:03 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:32:03 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:32:03 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:32:03 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:32:03 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:32:03 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:32:03 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:32:03 [DEBU] [Event:AgentCheckCache] end, time: 237.9µs
+2024-04-10 20:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:33:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:34:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:34:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:35:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:35:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:35:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:35:59 [DEBU] [Event:AgentCheckCache] end, time: 508.6µs
+2024-04-10 20:36:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:36:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:36:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:36:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:36:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:36:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:36:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:36:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:36:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:36:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:36:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:38:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:39:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:39:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:40:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:40:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:40:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:40:59 [DEBU] [Event:AgentCheckCache] end, time: 1.016ms
+2024-04-10 20:41:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:41:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:41:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:41:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:41:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:41:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:41:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:41:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:41:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:41:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:41:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:43:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:44:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:44:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:45:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:45:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:45:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:45:59 [DEBU] [Event:AgentCheckCache] end, time: 708.3µs
+2024-04-10 20:46:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:46:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:46:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:46:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:46:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:46:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:46:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:46:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:46:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:46:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:46:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:48:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:49:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:49:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:50:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:50:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:50:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 20:51:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:51:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:51:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:51:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:51:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:51:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:51:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:51:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:51:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:54:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:54:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 20:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 20:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 20:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 20:55:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 20:55:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:55:59 [DEBU] [Event:AgentCheckCache] end, time: 509.3µs
+2024-04-10 20:56:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 20:56:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 20:56:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 20:56:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 20:56:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 20:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 20:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 20:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 20:56:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 20:56:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 20:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 20:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 20:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 20:56:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 20:56:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 20:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 20:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 20:59:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 20:59:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:00:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:00:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:00:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:01:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:01:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:01:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:01:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:01:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:01:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:01:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:01:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:01:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:04:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:04:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:05:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:05:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:06:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:10:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:10:59 [DEBU] [Event:AgentCheckCache] end, time: 575.9µs
+2024-04-10 21:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:11:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:15:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:15:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:16:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:20:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:20:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:20:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:20:59 [DEBU] [Event:AgentCheckCache] end, time: 591.6µs
+2024-04-10 21:21:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:21:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:21:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:21:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:21:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:21:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:21:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:21:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:21:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:21:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:21:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:23:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:24:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:24:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:25:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:25:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:25:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:25:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:26:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:26:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:26:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:26:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:26:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:26:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:26:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:26:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:26:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:26:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:26:45 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:50:58 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:50:58 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:50:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:50:58 [DEBU] [Event:CheckPackage] end
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:50:58 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:50:58 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:50:58 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:50:58 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-10 21:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-10 21:50:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-10 21:50:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-10 21:50:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:50:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-10 21:51:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-10 21:51:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-10 21:51:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-10 21:51:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-10 21:51:12 [DEBU] [Event:AgentCheckState] end
+2024-04-10 21:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-10 21:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-10 21:51:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-10 21:51:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-10 21:51:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-10 21:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-10 21:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-10 21:51:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-10 21:51:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-10 21:51:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 00:53:07 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCleanPinned] all package clean pinned, next time will start check at offset 0
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 00:53:07 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 00:53:07 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 00:53:07 [DEBU] [Event:CheckPackage] end
+2024-04-11 00:53:07 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 00:53:07 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckPackageRedundancy] all package checked, next time will start check at offset 0
+2024-04-11 00:53:07 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 00:53:07 [DEBU] [Event:AgentCheckState] end
+2024-04-11 00:53:07 [DEBU] [Event:CleanPinned] begin with , PackageID: 1
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 08:52:58 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 08:52:58 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 08:53:22 [WARN] [Event:CleanPinned] getting package objects: requesting: wait response timeout
+2024-04-11 08:53:22 [DEBU] [Event:CleanPinned] end, time: 8h0m15.0458827s
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 08:53:22 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckState] end
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 08:53:22 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 08:53:22 [DEBU] [Event:CheckPackageRedundancy] begin with , PackageID: 1
+2024-04-11 08:53:22 [WARN] [Event:CheckPackageRedundancy] getting package objects: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:53:22 [DEBU] [Event:CheckPackageRedundancy] end, time: 0s
+2024-04-11 08:53:22 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 08:53:22 [DEBU] [Event:CheckPackage] end
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 08:53:22 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:53:22 [DEBU] [Event:AgentCheckCache] end, time: 112.5µs
+2024-04-11 08:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 08:53:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 08:54:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 08:54:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 08:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 08:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 08:55:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 08:55:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 08:55:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:55:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 08:56:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 08:56:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 08:56:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 08:56:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 08:56:12 [DEBU] [Event:AgentCheckState] end
+2024-04-11 08:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 08:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 08:56:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 08:56:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 08:56:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 08:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 08:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 08:56:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 08:56:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 08:56:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 08:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 08:58:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 08:59:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 08:59:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:00:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:00:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:00:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:00:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 09:01:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:01:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:01:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:01:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:01:12 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:01:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:01:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:01:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:01:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:01:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:01:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:03:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:04:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:04:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:05:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:05:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:05:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:05:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 09:06:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:06:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:06:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:06:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:06:12 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:06:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:06:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:06:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:06:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:06:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:06:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:08:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:09:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:09:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:10:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:10:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:10:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:10:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 09:11:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:11:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:11:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:11:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:11:12 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:11:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:11:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:11:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:11:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:11:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:11:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:13:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:14:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:14:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:15:59 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:15:59 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:15:59 [WARN] [Event:AgentCheckCache] [NodeID:1] checking ipfs: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:15:59 [DEBU] [Event:AgentCheckCache] end, time: 0s
+2024-04-11 09:16:12 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:16:12 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:16:12 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:16:12 [WARN] [Event:AgentCheckState] [NodeID:1] getting state: requesting: sending message: publishing data: Exception (504) Reason: "channel/connection is not open"
+2024-04-11 09:16:12 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:16:31 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:16:31 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:16:31 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:16:45 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:16:45 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:16:45 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:18:30 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:19:39 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:19:39 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:22:38 [INFO] start serving distlock
+2024-04-11 09:22:38 [INFO] start serving event executor
+2024-04-11 09:22:38 [INFO] start serving scanner server
+2024-04-11 09:27:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:27:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:27:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:27:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:28:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:28:00 [DEBU] [Event:AgentCheckCache] end, time: 5.1044ms
+2024-04-11 09:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:28:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:28:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:28:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:28:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:28:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:28:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:32:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:32:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:32:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:32:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:33:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:33:00 [DEBU] [Event:AgentCheckCache] end, time: 7.1757ms
+2024-04-11 09:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:33:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:33:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:33:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:33:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:33:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:33:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:37:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:37:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:37:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:37:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:38:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:38:00 [DEBU] [Event:AgentCheckCache] end, time: 3.8223ms
+2024-04-11 09:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:38:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:38:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:38:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:38:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:38:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:38:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:42:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:42:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:42:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:42:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:43:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:43:00 [DEBU] [Event:AgentCheckCache] end, time: 7.1344ms
+2024-04-11 09:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:43:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:43:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:43:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:43:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:43:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:43:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:47:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:47:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:47:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:47:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:48:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:48:00 [DEBU] [Event:AgentCheckCache] end, time: 6.7633ms
+2024-04-11 09:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:48:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:48:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:48:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:48:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:48:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:48:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:52:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:52:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:52:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:52:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:53:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:53:00 [DEBU] [Event:AgentCheckCache] end, time: 5.3633ms
+2024-04-11 09:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:53:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:53:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:53:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:53:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:53:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:53:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 09:57:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 09:57:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 09:57:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 09:57:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 09:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 09:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 09:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 09:58:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 09:58:00 [DEBU] [Event:AgentCheckCache] end, time: 8.5426ms
+2024-04-11 09:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 09:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 09:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 09:58:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 09:58:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 09:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 09:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 09:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 09:58:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 09:58:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 09:58:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 09:58:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 09:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 09:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:02:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:02:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:02:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:02:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:03:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:03:00 [DEBU] [Event:AgentCheckCache] end, time: 6.8867ms
+2024-04-11 10:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:03:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:03:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:03:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:03:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:03:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:03:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:07:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:07:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:07:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:07:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:08:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:08:00 [DEBU] [Event:AgentCheckCache] end, time: 3.2183ms
+2024-04-11 10:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:08:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:08:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:08:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:08:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:08:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:08:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:12:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:12:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:12:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:12:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:13:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:13:00 [DEBU] [Event:AgentCheckCache] end, time: 4.0063ms
+2024-04-11 10:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:13:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:13:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:13:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:13:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:13:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:13:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:17:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:17:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:17:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:17:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:18:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:18:00 [DEBU] [Event:AgentCheckCache] end, time: 21.883ms
+2024-04-11 10:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:18:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:18:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:18:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:18:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:18:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:18:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:18:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:18:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:22:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:22:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:22:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:22:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:23:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:23:00 [DEBU] [Event:AgentCheckCache] end, time: 5.3425ms
+2024-04-11 10:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:23:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:23:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:23:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:23:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:23:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:23:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:23:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:23:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:27:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:27:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:27:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:27:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:28:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:28:00 [DEBU] [Event:AgentCheckCache] end, time: 7.4954ms
+2024-04-11 10:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:28:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:28:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:28:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:28:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:28:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:28:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:28:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:28:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:32:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:32:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:32:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:32:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:33:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:33:00 [DEBU] [Event:AgentCheckCache] end, time: 8.978ms
+2024-04-11 10:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:33:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:33:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:33:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:33:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:33:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:33:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:33:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:33:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:37:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:37:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:37:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:37:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:38:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:38:00 [DEBU] [Event:AgentCheckCache] end, time: 10.1678ms
+2024-04-11 10:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:38:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:38:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:38:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:38:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:38:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:38:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:42:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:42:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:42:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:42:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:43:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:43:00 [DEBU] [Event:AgentCheckCache] end, time: 16.5366ms
+2024-04-11 10:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:43:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:43:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:43:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:43:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:43:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:43:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:47:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:47:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:47:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:47:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:48:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:48:00 [DEBU] [Event:AgentCheckCache] end, time: 12.485ms
+2024-04-11 10:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:48:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:48:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:48:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:48:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:48:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:48:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:52:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:52:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:52:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:52:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:53:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:53:00 [DEBU] [Event:AgentCheckCache] end, time: 13.8919ms
+2024-04-11 10:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:53:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:53:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:53:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:53:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:53:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:53:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 10:57:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 10:57:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 10:57:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 10:57:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 10:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 10:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 10:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 10:58:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 10:58:00 [DEBU] [Event:AgentCheckCache] end, time: 4.9723ms
+2024-04-11 10:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 10:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 10:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 10:58:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 10:58:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 10:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 10:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 10:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 10:58:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 10:58:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 10:58:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 10:58:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 10:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 10:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:02:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:02:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:02:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:02:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:03:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:03:00 [DEBU] [Event:AgentCheckCache] end, time: 8.9743ms
+2024-04-11 11:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:03:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:03:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:03:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:03:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:03:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:03:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:07:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:07:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:07:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:07:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:08:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:08:00 [DEBU] [Event:AgentCheckCache] end, time: 8.4235ms
+2024-04-11 11:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:08:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:08:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:08:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:08:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:08:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:08:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:12:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:12:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:12:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:12:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:13:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:13:00 [DEBU] [Event:AgentCheckCache] end, time: 6.9862ms
+2024-04-11 11:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:13:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:13:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:13:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:13:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:13:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:13:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:17:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:17:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:17:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:17:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:18:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:18:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:18:00 [DEBU] [Event:AgentCheckCache] end, time: 8.1758ms
+2024-04-11 11:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:18:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:18:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:18:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:18:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:18:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:18:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:18:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:18:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:18:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:18:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:22:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:22:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:22:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:22:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:23:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:23:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:23:00 [DEBU] [Event:AgentCheckCache] end, time: 7.2566ms
+2024-04-11 11:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:23:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:23:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:23:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:23:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:23:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:23:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:23:16 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:23:16 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:23:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:23:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:27:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:27:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:27:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:27:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:28:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:28:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:28:00 [DEBU] [Event:AgentCheckCache] end, time: 7.8499ms
+2024-04-11 11:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:28:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:28:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:28:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:28:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:28:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:28:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:28:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:28:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:28:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:28:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:32:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:32:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:32:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:32:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:33:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:33:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:33:00 [DEBU] [Event:AgentCheckCache] end, time: 3.5236ms
+2024-04-11 11:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:33:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:33:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:33:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:33:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:33:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:33:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:33:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:33:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:33:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:33:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:37:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:37:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:37:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:37:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:38:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:38:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:38:00 [DEBU] [Event:AgentCheckCache] end, time: 17.9737ms
+2024-04-11 11:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:38:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:38:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:38:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:38:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:38:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:38:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:38:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:38:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:38:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:42:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:42:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:42:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:42:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:43:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:43:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:43:00 [DEBU] [Event:AgentCheckCache] end, time: 5.9095ms
+2024-04-11 11:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:43:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:43:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:43:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:43:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:43:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:43:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:43:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:43:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:43:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:47:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:47:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:47:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:47:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:48:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:48:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:48:00 [DEBU] [Event:AgentCheckCache] end, time: 12.1545ms
+2024-04-11 11:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:48:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:48:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:48:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:48:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:48:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:48:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:48:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:48:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:48:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:52:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:52:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:52:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:52:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:53:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:53:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:53:00 [DEBU] [Event:AgentCheckCache] end, time: 3.1681ms
+2024-04-11 11:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:53:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:53:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:53:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:53:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:53:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:53:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:53:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:53:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:53:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 11:57:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 11:57:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 11:57:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 11:57:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 11:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 11:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 11:58:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 11:58:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 11:58:00 [DEBU] [Event:AgentCheckCache] end, time: 3.9385ms
+2024-04-11 11:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 11:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 11:58:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 11:58:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 11:58:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 11:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 11:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 11:58:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 11:58:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 11:58:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 11:58:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 11:58:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 11:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 11:58:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 12:02:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 12:02:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 12:02:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 12:02:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 12:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 12:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 12:03:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 12:03:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 12:03:00 [DEBU] [Event:AgentCheckCache] end, time: 3.594ms
+2024-04-11 12:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 12:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 12:03:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 12:03:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 12:03:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 12:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 12:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 12:03:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 12:03:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 12:03:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 12:03:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 12:03:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 12:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 12:03:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 12:07:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 12:07:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 12:07:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 12:07:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 12:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 12:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 12:08:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 12:08:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 12:08:00 [DEBU] [Event:AgentCheckCache] end, time: 3.0083ms
+2024-04-11 12:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 12:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 12:08:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 12:08:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 12:08:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 12:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 12:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 12:08:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 12:08:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 12:08:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 12:08:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 12:08:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 12:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 12:08:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 12:12:53 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 12:12:53 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 12:12:53 [DEBU] [Event:AgentCheckState] begin with , NodeID: 1
+2024-04-11 12:12:53 [DEBU] [Event:AgentCheckState] end
+2024-04-11 12:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 12:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 12:13:00 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 12:13:00 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 12:13:00 [DEBU] [Event:AgentCheckCache] end, time: 7.9783ms
+2024-04-11 12:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 12:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 12:13:05 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 12:13:05 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 12:13:05 [DEBU] [Event:CheckPackage] end
+2024-04-11 12:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 12:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 12:13:12 [DEBU] [TickEvent:BatchCheckAllStorage] end
+2024-04-11 12:13:12 [DEBU] [Event:AgentCheckStorage] begin with , StorageID: 1
+2024-04-11 12:13:12 [DEBU] [Event:AgentCheckStorage] end
+2024-04-11 12:13:17 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 12:13:17 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 12:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 12:13:37 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckPackageRedundancy] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckPackageRedundancy] end
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllPackage] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCleanPinned] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCleanPinned] end
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchAllAgentCheckCache] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllStorage] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:CheckAgentState] begin
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllPackage] all package checked, next time will start check at 0
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllPackage] end
+2024-04-11 12:19:52 [DEBU] [Event:CheckPackage] begin with , len(PackageIDs): 1
+2024-04-11 12:19:52 [DEBU] [Event:CheckPackage] end
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchAllAgentCheckCache] new check start, get all nodes
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchAllAgentCheckCache] end
+2024-04-11 12:19:52 [DEBU] [Event:AgentCheckCache] begin with , NodeID: 1
+2024-04-11 12:19:52 [DEBU] [TickEvent:CheckAgentState] end
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllStorage] all storage checked, next time will start check at 0
+2024-04-11 12:19:52 [DEBU] [TickEvent:BatchCheckAllStorage] end
diff --git a/scanner/internal/event/agent_cache_gc.go b/scanner/internal/event/agent_cache_gc.go
index f37bff8..f66e9b9 100644
--- a/scanner/internal/event/agent_cache_gc.go
+++ b/scanner/internal/event/agent_cache_gc.go
@@ -15,16 +15,22 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// AgentCacheGC wraps the AgentCacheGC event from the scanner event package.
type AgentCacheGC struct {
*scevt.AgentCacheGC
}
+// NewAgentCacheGC creates a new AgentCacheGC instance.
+// evt: the incoming AgentCacheGC event from the scanner event package.
func NewAgentCacheGC(evt *scevt.AgentCacheGC) *AgentCacheGC {
return &AgentCacheGC{
AgentCacheGC: evt,
}
}
+// TryMerge attempts to merge this event with another event.
+// other: the event to merge with.
+// The return value indicates whether the merge succeeded.
func (t *AgentCacheGC) TryMerge(other Event) bool {
event, ok := other.(*AgentCacheGC)
if !ok {
@@ -38,6 +44,8 @@ func (t *AgentCacheGC) TryMerge(other Event) bool {
return true
}
+// Execute performs the cache garbage collection.
+// execCtx: the execution context carrying the arguments and environment needed for execution.
func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
log := logger.WithType[AgentCacheGC]("Event")
startTime := time.Now()
@@ -46,10 +54,9 @@ func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
log.Debugf("end, time: %v", time.Since(startTime))
}()
- // TODO unavailable的节点需不需要发送任务?
-
+	// Acquire a distributed lock on the resources involved
mutex, err := reqbuilder.NewBuilder().
- // 进行GC
+		// Run IPFS garbage collection
IPFS().GC(t.NodeID).
MutexLock(execCtx.Args.DistLock)
if err != nil {
@@ -58,6 +65,7 @@ func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
}
defer mutex.Unlock()
+	// Collect the file hashes to include in the garbage collection request
var allFileHashes []string
err = execCtx.Args.DB.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
blocks, err := execCtx.Args.DB.ObjectBlock().GetByNodeID(tx, t.NodeID)
@@ -83,6 +91,7 @@ func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
return
}
+	// Acquire an agent client for communicating with the node
agtCli, err := stgglb.AgentMQPool.Acquire(t.NodeID)
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("create agent client failed, err: %s", err.Error())
@@ -90,6 +99,7 @@ func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
}
defer stgglb.AgentMQPool.Release(agtCli)
+	// Send the garbage collection request to the agent
_, err = agtCli.CacheGC(agtmq.ReqCacheGC(allFileHashes), mq.RequestOption{Timeout: time.Minute})
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("ipfs gc: %s", err.Error())
@@ -97,6 +107,7 @@ func (t *AgentCacheGC) Execute(execCtx ExecuteContext) {
}
}
+// Register the message convertor so the system can handle AgentCacheGC messages.
func init() {
RegisterMessageConvertor(NewAgentCacheGC)
}
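
The TryMerge comments in agent_cache_gc.go above describe collapsing duplicate GC events for the same node. Below is a minimal, self-contained sketch of that merge-by-NodeID pattern; the Event interface, the cacheGCEvent type, its fields, and the hash-union step are illustrative assumptions rather than the project's actual types.

package main

import "fmt"

// Event is a stand-in for the scanner's event interface.
type Event interface {
	TryMerge(other Event) bool
}

// cacheGCEvent is a hypothetical event carrying a node ID and the file
// hashes involved in that node's garbage collection.
type cacheGCEvent struct {
	NodeID     int64
	FileHashes []string
}

// TryMerge merges another event into this one only when it is the same
// kind of event and targets the same node; the hash lists are unioned so
// a single agent request can cover both events.
func (t *cacheGCEvent) TryMerge(other Event) bool {
	evt, ok := other.(*cacheGCEvent)
	if !ok || evt.NodeID != t.NodeID {
		return false
	}
	seen := make(map[string]bool, len(t.FileHashes))
	for _, h := range t.FileHashes {
		seen[h] = true
	}
	for _, h := range evt.FileHashes {
		if !seen[h] {
			t.FileHashes = append(t.FileHashes, h)
			seen[h] = true
		}
	}
	return true
}

func main() {
	a := &cacheGCEvent{NodeID: 1, FileHashes: []string{"QmA"}}
	b := &cacheGCEvent{NodeID: 1, FileHashes: []string{"QmB", "QmA"}}
	merged := a.TryMerge(b)
	fmt.Println(merged, a.FileHashes) // true [QmA QmB]
}
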
diff --git a/scanner/internal/event/agent_check_cache.go b/scanner/internal/event/agent_check_cache.go
index 17bc18f..1243810 100644
--- a/scanner/internal/event/agent_check_cache.go
+++ b/scanner/internal/event/agent_check_cache.go
@@ -15,16 +15,20 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// AgentCheckCache handles the agent cache check event.
type AgentCheckCache struct {
*scevt.AgentCheckCache
}
+// NewAgentCheckCache creates a new AgentCheckCache instance.
func NewAgentCheckCache(evt *scevt.AgentCheckCache) *AgentCheckCache {
return &AgentCheckCache{
AgentCheckCache: evt,
}
}
+// TryMerge attempts to merge this event with another event.
+// The merge is rejected if the other event has a different type or a different NodeID.
func (t *AgentCheckCache) TryMerge(other Event) bool {
event, ok := other.(*AgentCheckCache)
if !ok {
@@ -38,6 +42,7 @@ func (t *AgentCheckCache) TryMerge(other Event) bool {
return true
}
+// Execute runs the cache check: it compares the cache records in the database with the cache contents reported by the agent and updates the records accordingly.
func (t *AgentCheckCache) Execute(execCtx ExecuteContext) {
log := logger.WithType[AgentCheckCache]("Event")
startTime := time.Now()
@@ -45,7 +50,6 @@ func (t *AgentCheckCache) Execute(execCtx ExecuteContext) {
defer func() {
log.Debugf("end, time: %v", time.Since(startTime))
}()
- // TODO unavailable的节点需不需要发送任务?
agtCli, err := stgglb.AgentMQPool.Acquire(t.NodeID)
if err != nil {
@@ -62,7 +66,7 @@ func (t *AgentCheckCache) Execute(execCtx ExecuteContext) {
realFileHashes := lo.SliceToMap(checkResp.FileHashes, func(hash string) (string, bool) { return hash, true })
- // 根据IPFS中实际文件情况修改元数据。修改过程中的失败均忽略。(但关联修改需要原子性)
+	// Apply the cache updates inside a transaction
execCtx.Args.DB.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
t.checkCache(execCtx, tx, realFileHashes)
@@ -73,7 +77,7 @@ func (t *AgentCheckCache) Execute(execCtx ExecuteContext) {
})
}
-// 对比Cache表中的记录,多了增加,少了删除
+// checkCache compares the records in the Cache table against the file hashes that actually exist, inserting missing records and deleting stale ones.
func (t *AgentCheckCache) checkCache(execCtx ExecuteContext, tx *sqlx.Tx, realFileHashes map[string]bool) {
log := logger.WithType[AgentCheckCache]("Event")
@@ -91,8 +95,6 @@ func (t *AgentCheckCache) checkCache(execCtx ExecuteContext, tx *sqlx.Tx, realFi
var rms []string
for _, c := range caches {
if realFileHashesCp[c.FileHash] {
- // Cache表使用FileHash和NodeID作为主键,
- // 所以通过同一个NodeID查询的结果不会存在两条相同FileHash的情况
delete(realFileHashesCp, c.FileHash)
continue
}
@@ -115,7 +117,7 @@ func (t *AgentCheckCache) checkCache(execCtx ExecuteContext, tx *sqlx.Tx, realFi
}
}
-// 对比PinnedObject表,多了不变,少了删除
+// checkPinnedObject compares the PinnedObject table and deletes records whose files no longer exist.
func (t *AgentCheckCache) checkPinnedObject(execCtx ExecuteContext, tx *sqlx.Tx, realFileHashes map[string]bool) {
log := logger.WithType[AgentCheckCache]("Event")
@@ -141,7 +143,7 @@ func (t *AgentCheckCache) checkPinnedObject(execCtx ExecuteContext, tx *sqlx.Tx,
}
}
-// 对比ObjectBlock表,多了不变,少了删除
+// checkObjectBlock compares the ObjectBlock table and deletes records whose files no longer exist.
func (t *AgentCheckCache) checkObjectBlock(execCtx ExecuteContext, tx *sqlx.Tx, realFileHashes map[string]bool) {
log := logger.WithType[AgentCheckCache]("Event")
@@ -167,6 +169,7 @@ func (t *AgentCheckCache) checkObjectBlock(execCtx ExecuteContext, tx *sqlx.Tx,
}
}
+// init registers the AgentCheckCache message convertor.
func init() {
RegisterMessageConvertor(NewAgentCheckCache)
}
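
checkCache in agent_check_cache.go reconciles the Cache table with the file hashes the agent actually holds. The sketch below shows only the set-difference core of that reconciliation under assumed types and signatures; the real method issues SQL inside a serializable transaction through the project's DB layer.

package main

import "fmt"

// reconcileCache returns which cached rows to delete (no backing file)
// and which real file hashes to insert (no row yet), given the hashes
// the agent actually has and the hashes currently recorded in the table.
func reconcileCache(realFileHashes map[string]bool, cachedHashes []string) (toDelete, toInsert []string) {
	remaining := make(map[string]bool, len(realFileHashes))
	for h := range realFileHashes {
		remaining[h] = true
	}
	for _, h := range cachedHashes {
		if remaining[h] {
			delete(remaining, h) // row matches a real file, keep it
			continue
		}
		toDelete = append(toDelete, h) // row has no backing file
	}
	for h := range remaining {
		toInsert = append(toInsert, h) // real file with no row yet
	}
	return toDelete, toInsert
}

func main() {
	real := map[string]bool{"QmA": true, "QmB": true}
	rows := []string{"QmB", "QmC"}
	del, ins := reconcileCache(real, rows)
	fmt.Println(del, ins) // [QmC] [QmA]
}
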
diff --git a/scanner/internal/event/agent_check_state.go b/scanner/internal/event/agent_check_state.go
index 66dfb70..d303aa0 100644
--- a/scanner/internal/event/agent_check_state.go
+++ b/scanner/internal/event/agent_check_state.go
@@ -13,16 +13,23 @@ import (
"gitlink.org.cn/cloudream/storage/scanner/internal/config"
)
+// AgentCheckState wraps the agent state check event from the scanner event package.
type AgentCheckState struct {
*scevt.AgentCheckState
}
+// NewAgentCheckState creates a new AgentCheckState instance.
+// evt: the incoming AgentCheckState event.
+// Returns a pointer to the newly created AgentCheckState.
func NewAgentCheckState(evt *scevt.AgentCheckState) *AgentCheckState {
return &AgentCheckState{
AgentCheckState: evt,
}
}
+// TryMerge attempts to merge this event with another event.
+// other: the event to merge with.
+// Returns true if the merge succeeds, false otherwise.
func (t *AgentCheckState) TryMerge(other Event) bool {
event, ok := other.(*AgentCheckState)
if !ok {
@@ -32,21 +39,26 @@ func (t *AgentCheckState) TryMerge(other Event) bool {
return t.NodeID == event.NodeID
}
+// Execute runs the node state check.
+// execCtx: the execution context carrying the arguments and environment needed for execution.
func (t *AgentCheckState) Execute(execCtx ExecuteContext) {
log := logger.WithType[AgentCheckState]("Event")
log.Debugf("begin with %v", logger.FormatStruct(t.AgentCheckState))
defer log.Debugf("end")
+	// Look up the node by its ID
node, err := execCtx.Args.DB.Node().GetByID(execCtx.Args.DB.SQLCtx(), t.NodeID)
if err == sql.ErrNoRows {
return
}
+	// Handle failure to fetch the node
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("get node by id failed, err: %s", err.Error())
return
}
+	// Acquire an agent client
agtCli, err := stgglb.AgentMQPool.Acquire(t.NodeID)
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("create agent client failed, err: %s", err.Error())
@@ -54,12 +66,12 @@ func (t *AgentCheckState) Execute(execCtx ExecuteContext) {
}
defer stgglb.AgentMQPool.Release(agtCli)
+	// Ask the agent for its current state
getResp, err := agtCli.GetState(agtmq.NewGetState(), mq.RequestOption{Timeout: time.Second * 30})
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("getting state: %s", err.Error())
- // 检查上次上报时间,超时的设置为不可用
- // TODO 没有上报过是否要特殊处理?
+		// Check the node's last report time; if it has timed out, mark the node unavailable
if node.LastReportTime != nil && time.Since(*node.LastReportTime) > time.Duration(config.Cfg().NodeUnavailableSeconds)*time.Second {
err := execCtx.Args.DB.Node().UpdateState(execCtx.Args.DB.SQLCtx(), t.NodeID, consts.NodeStateUnavailable)
if err != nil {
@@ -69,7 +81,7 @@ func (t *AgentCheckState) Execute(execCtx ExecuteContext) {
return
}
- // 根据返回结果修改节点状态
+	// Update the node state according to the state reported by the agent
if getResp.IPFSState != consts.IPFSStateOK {
log.WithField("NodeID", t.NodeID).Warnf("IPFS status is %s, set node state unavailable", getResp.IPFSState)
@@ -80,13 +92,14 @@ func (t *AgentCheckState) Execute(execCtx ExecuteContext) {
return
}
- // TODO 如果以后还有其他的状态,要判断哪些状态下能设置Normal
+	// Mark the node state as normal
err = execCtx.Args.DB.Node().UpdateState(execCtx.Args.DB.SQLCtx(), t.NodeID, consts.NodeStateNormal)
if err != nil {
log.WithField("NodeID", t.NodeID).Warnf("change node state failed, err: %s", err.Error())
}
}
+// init registers the AgentCheckState message convertor.
func init() {
RegisterMessageConvertor(NewAgentCheckState)
}
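
The Execute comments in agent_check_state.go describe when a node is marked unavailable versus normal. Below is a small sketch of that decision rule in isolation; the state constants, parameter names, and timeout handling are assumptions, and the real code also persists the new state to the database.

package main

import (
	"fmt"
	"time"
)

// Assumed state names; the real constants live in the project's consts package.
const (
	stateNormal      = "Normal"
	stateUnavailable = "Unavailable"
)

// decideNodeState applies the rule described above: a failed state query
// only demotes the node once its last report is older than the configured
// timeout, an unhealthy IPFS daemon demotes it immediately, and otherwise
// the node is considered normal.
func decideNodeState(queryErr error, lastReport *time.Time, timeout time.Duration, ipfsOK bool, current string) string {
	if queryErr != nil {
		if lastReport != nil && time.Since(*lastReport) > timeout {
			return stateUnavailable
		}
		return current // too early to tell, keep the current state
	}
	if !ipfsOK {
		return stateUnavailable
	}
	return stateNormal
}

func main() {
	old := time.Now().Add(-10 * time.Minute)
	fmt.Println(decideNodeState(fmt.Errorf("timeout"), &old, 5*time.Minute, true, stateNormal)) // Unavailable
	fmt.Println(decideNodeState(nil, &old, 5*time.Minute, true, stateUnavailable))              // Normal
}
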
diff --git a/scanner/internal/event/agent_check_storage.go b/scanner/internal/event/agent_check_storage.go
index 9edaacf..105e477 100644
--- a/scanner/internal/event/agent_check_storage.go
+++ b/scanner/internal/event/agent_check_storage.go
@@ -15,16 +15,19 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// AgentCheckStorage is the event handler that checks a storage through its agent.
type AgentCheckStorage struct {
*scevt.AgentCheckStorage
}
+// NewAgentCheckStorage creates and returns an initialized AgentCheckStorage instance.
func NewAgentCheckStorage(evt *scevt.AgentCheckStorage) *AgentCheckStorage {
return &AgentCheckStorage{
AgentCheckStorage: evt,
}
}
+// TryMerge attempts to merge this event with another event. The merge only succeeds when both events share the same StorageID.
func (t *AgentCheckStorage) TryMerge(other Event) bool {
event, ok := other.(*AgentCheckStorage)
if !ok {
@@ -38,13 +41,13 @@ func (t *AgentCheckStorage) TryMerge(other Event) bool {
return true
}
+// Execute runs the storage check event. It communicates with the storage's node, verifies the actual storage contents, and updates the database based on the result.
func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
log := logger.WithType[AgentCheckStorage]("Event")
log.Debugf("begin with %v", logger.FormatStruct(t.AgentCheckStorage))
defer log.Debugf("end")
- // 读取数据的地方就不加锁了,因为check任务会反复执行,单次失败问题不大
-
+	// Fetch the storage and its associated node from the database
stg, err := execCtx.Args.DB.Storage().GetByID(execCtx.Args.DB.SQLCtx(), t.StorageID)
if err != nil {
if err != sql.ErrNoRows {
@@ -61,10 +64,12 @@ func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
return
}
+	// If the node is not in the normal state, return immediately
if node.State != consts.NodeStateNormal {
return
}
+	// Acquire an agent client for communicating with the storage's node
agtCli, err := stgglb.AgentMQPool.Acquire(stg.NodeID)
if err != nil {
log.WithField("NodeID", stg.NodeID).Warnf("create agent client failed, err: %s", err.Error())
@@ -72,11 +77,14 @@ func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
}
defer stgglb.AgentMQPool.Release(agtCli)
+	// Send the check request to the storage's node and handle the response
checkResp, err := agtCli.StorageCheck(agtmq.NewStorageCheck(stg.StorageID, stg.Directory), mq.RequestOption{Timeout: time.Minute})
if err != nil {
log.WithField("NodeID", stg.NodeID).Warnf("checking storage: %s", err.Error())
return
}
+
+	// From the check response, build the set of packages that actually exist
realPkgs := make(map[cdssdk.UserID]map[cdssdk.PackageID]bool)
for _, pkg := range checkResp.Packages {
pkgs, ok := realPkgs[pkg.UserID]
@@ -88,6 +96,7 @@ func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
pkgs[pkg.PackageID] = true
}
+	// Update the database inside a transaction, deleting records for packages that no longer exist
execCtx.Args.DB.DoTx(sql.LevelSerializable, func(tx *sqlx.Tx) error {
packages, err := execCtx.Args.DB.StoragePackage().GetAllByStorageID(tx, t.StorageID)
if err != nil {
@@ -118,7 +127,7 @@ func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
rmdPkgIDs[rm.PackageID] = true
}
- // 彻底删除已经是Deleted状态,且不被再引用的Package
+		// Permanently delete packages that are no longer referenced
for pkgID := range rmdPkgIDs {
err := execCtx.Args.DB.Package().DeleteUnused(tx, pkgID)
if err != nil {
@@ -131,6 +140,7 @@ func (t *AgentCheckStorage) Execute(execCtx ExecuteContext) {
})
}
+// init registers the AgentCheckStorage event handler so it can respond to the corresponding messages.
func init() {
RegisterMessageConvertor(NewAgentCheckStorage)
}
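
Execute in agent_check_storage.go builds a nested user-to-package map from the agent's report and then removes StoragePackage rows that the agent no longer reports. A minimal sketch of that reconciliation follows, using plain integer IDs in place of the project's typed IDs.

package main

import "fmt"

// storagePackage is a stand-in for a StoragePackage table row; the real
// model lives in the project's DB layer and has more fields.
type storagePackage struct {
	UserID    int64
	PackageID int64
}

// findMissing returns the rows whose (UserID, PackageID) pair is not in
// the set reported by the agent; those rows are candidates for deletion.
func findMissing(rows []storagePackage, real map[int64]map[int64]bool) []storagePackage {
	var missing []storagePackage
	for _, r := range rows {
		if pkgs, ok := real[r.UserID]; ok && pkgs[r.PackageID] {
			continue // still present on the storage node
		}
		missing = append(missing, r)
	}
	return missing
}

func main() {
	real := map[int64]map[int64]bool{1: {100: true}}
	rows := []storagePackage{{UserID: 1, PackageID: 100}, {UserID: 1, PackageID: 101}}
	fmt.Println(findMissing(rows, real)) // [{1 101}]
}
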
diff --git a/scanner/internal/event/agent_storage_gc.go b/scanner/internal/event/agent_storage_gc.go
index c3c22ad..6e09ca2 100644
--- a/scanner/internal/event/agent_storage_gc.go
+++ b/scanner/internal/event/agent_storage_gc.go
@@ -12,16 +12,22 @@ import (
scevt "gitlink.org.cn/cloudream/storage/common/pkgs/mq/scanner/event"
)
+// AgentStorageGC wraps the agent storage garbage collection event from the scanner event package.
type AgentStorageGC struct {
*scevt.AgentStorageGC
}
+// NewAgentStorageGC creates a new AgentStorageGC instance.
+// evt: the incoming AgentStorageGC event from the scanner event package.
func NewAgentStorageGC(evt *scevt.AgentStorageGC) *AgentStorageGC {
return &AgentStorageGC{
AgentStorageGC: evt,
}
}
+// TryMerge attempts to merge two events.
+// other: the event to merge with.
+// The return value indicates whether the merge succeeded.
func (t *AgentStorageGC) TryMerge(other Event) bool {
event, ok := other.(*AgentStorageGC)
if !ok {
@@ -35,6 +41,8 @@ func (t *AgentStorageGC) TryMerge(other Event) bool {
return true
}
+// Execute runs the storage garbage collection task.
+// execCtx: the execution context carrying the arguments and environment needed for execution.
func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
log := logger.WithType[AgentStorageGC]("Event")
startTime := time.Now()
@@ -43,10 +51,10 @@ func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
log.Debugf("end, time: %v", time.Since(startTime))
}()
- // TODO unavailable的节点需不需要发送任务?
+	// Acquire the distributed lock before performing the storage garbage collection
mutex, err := reqbuilder.NewBuilder().
- // 进行GC
+		// Run garbage collection on the storage
Storage().GC(t.StorageID).
MutexLock(execCtx.Args.DistLock)
if err != nil {
@@ -55,6 +63,8 @@ func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
}
defer mutex.Unlock()
+	// Fetch the storage and its storage-package records from the database
+
getStg, err := execCtx.Args.DB.Storage().GetByID(execCtx.Args.DB.SQLCtx(), t.StorageID)
if err != nil {
log.WithField("StorageID", t.StorageID).Warnf("getting storage: %s", err.Error())
@@ -67,6 +77,8 @@ func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
return
}
+	// Create an agent client for the storage's node
+
agtCli, err := stgglb.AgentMQPool.Acquire(getStg.NodeID)
if err != nil {
log.WithField("NodeID", getStg.NodeID).Warnf("create agent client failed, err: %s", err.Error())
@@ -74,6 +86,8 @@ func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
}
defer stgglb.AgentMQPool.Release(agtCli)
+	// Send the storage garbage collection request to the agent
+
_, err = agtCli.StorageGC(agtmq.ReqStorageGC(t.StorageID, getStg.Directory, stgPkgs), mq.RequestOption{Timeout: time.Minute})
if err != nil {
log.WithField("StorageID", t.StorageID).Warnf("storage gc: %s", err.Error())
@@ -81,6 +95,7 @@ func (t *AgentStorageGC) Execute(execCtx ExecuteContext) {
}
}
+// Register the message convertor so the system can handle AgentStorageGC events.
func init() {
RegisterMessageConvertor(NewAgentStorageGC)
}
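
The storage GC flow mirrors the cache GC above: take the distributed lock, look up what should be kept, and ask the node's agent to collect the rest. The sketch below captures only that lock-then-request shape, with function-valued stand-ins for the distlock and MQ client; none of these signatures are the project's real APIs.

package main

import (
	"fmt"
	"time"
)

// unlocker releases a previously acquired distributed lock.
type unlocker func()

// storageGCFn is a stand-in for an agent client's storage GC call.
type storageGCFn func(storageID int64, dir string, packages []string, timeout time.Duration) error

// runStorageGC takes the distributed lock, asks the agent to garbage-collect
// the storage directory, and releases the lock on the way out.
func runStorageGC(lock func() (unlocker, error), gc storageGCFn, storageID int64, dir string, pkgs []string) error {
	unlock, err := lock()
	if err != nil {
		return fmt.Errorf("acquiring distlock: %w", err)
	}
	defer unlock()
	return gc(storageID, dir, pkgs, time.Minute)
}

func main() {
	err := runStorageGC(
		func() (unlocker, error) { return func() {}, nil },
		func(id int64, dir string, pkgs []string, _ time.Duration) error {
			fmt.Printf("gc storage %d at %s, keeping %v\n", id, dir, pkgs)
			return nil
		},
		1, "/data/storage1", []string{"pkg-1"},
	)
	fmt.Println("err:", err)
}
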
diff --git a/scanner/main.go b/scanner/main.go
index 88d6a5a..63d6b58 100644
--- a/scanner/main.go
+++ b/scanner/main.go
@@ -1,5 +1,6 @@
package main
+// The main program initializes the configuration, logging, database connection, distributed lock, event executor, scanner server, and periodic tasks.
import (
"fmt"
"os"
@@ -17,28 +18,34 @@ import (
)
func main() {
+	// Initialize configuration
err := config.Init()
if err != nil {
fmt.Printf("init config failed, err: %s", err.Error())
os.Exit(1)
}
+	// Initialize logging
err = logger.Init(&config.Cfg().Logger)
if err != nil {
fmt.Printf("init logger failed, err: %s", err.Error())
os.Exit(1)
}
+	// Initialize the database connection
db, err := db.NewDB(&config.Cfg().DB)
if err != nil {
logger.Fatalf("new db failed, err: %s", err.Error())
}
+	// Initialize the message queue connection pools
stgglb.InitMQPool(&config.Cfg().RabbitMQ)
+	// WaitGroup used to wait for all goroutines to finish
wg := sync.WaitGroup{}
wg.Add(3)
+	// Initialize the distributed lock service
distlockSvc, err := distlock.NewService(&config.Cfg().DistLock)
if err != nil {
logger.Warnf("new distlock service failed, err: %s", err.Error())
@@ -46,9 +53,11 @@ func main() {
}
go serveDistLock(distlockSvc, &wg)
+	// Initialize the event executor and start serving
eventExecutor := event.NewExecutor(db, distlockSvc)
go serveEventExecutor(&eventExecutor, &wg)
+	// Initialize the scanner server and start serving
agtSvr, err := scmq.NewServer(mq.NewService(&eventExecutor), &config.Cfg().RabbitMQ)
if err != nil {
logger.Fatalf("new agent server failed, err: %s", err.Error())
@@ -59,15 +68,20 @@ func main() {
go serveScannerServer(agtSvr, &wg)
+	// Initialize and start the periodic tasks
tickExecutor := tickevent.NewExecutor(tickevent.ExecuteArgs{
EventExecutor: &eventExecutor,
DB: db,
})
startTickEvent(&tickExecutor)
+	// Wait for all services to finish
wg.Wait()
}
+// serveEventExecutor runs the event executor service.
+// executor: the event executor instance.
+// wg: the WaitGroup used to signal completion.
func serveEventExecutor(executor *event.Executor, wg *sync.WaitGroup) {
logger.Info("start serving event executor")
@@ -82,6 +96,9 @@ func serveEventExecutor(executor *event.Executor, wg *sync.WaitGroup) {
wg.Done()
}
+// serveScannerServer runs the scanner MQ server.
+// server: the scanner server instance.
+// wg: the WaitGroup used to signal completion.
func serveScannerServer(server *scmq.Server, wg *sync.WaitGroup) {
logger.Info("start serving scanner server")
@@ -96,6 +113,9 @@ func serveScannerServer(server *scmq.Server, wg *sync.WaitGroup) {
wg.Done()
}
+// serveDistLock runs the distributed lock service.
+// svc: the distributed lock service instance.
+// wg: the WaitGroup used to signal completion.
func serveDistLock(svc *distlock.Service, wg *sync.WaitGroup) {
logger.Info("start serving distlock")
@@ -110,22 +130,30 @@ func serveDistLock(svc *distlock.Service, wg *sync.WaitGroup) {
wg.Done()
}
+// startTickEvent starts the periodic tick events.
+// tickExecutor: pointer to the tick event executor used to start the periodic tasks.
func startTickEvent(tickExecutor *tickevent.Executor) {
- // TODO 可以考虑增加配置文件,配置这些任务间隔时间
+	// Consider adding a config option for these task intervals
- interval := 5 * 60 * 1000
+	interval := 5 * 60 * 1000 // default interval between task runs, in milliseconds (5 minutes)
+	// Start the periodic task that checks the caches on all agents
tickExecutor.Start(tickevent.NewBatchAllAgentCheckCache(), interval, tickevent.StartOption{RandomStartDelayMs: 60 * 1000})
+	// Start the periodic task that checks all packages
tickExecutor.Start(tickevent.NewBatchCheckAllPackage(), interval, tickevent.StartOption{RandomStartDelayMs: 60 * 1000})
- // tickExecutor.Start(tickevent.NewBatchCheckAllRepCount(), interval, tickevent.StartOption{RandomStartDelayMs: 60 * 1000})
+	// The BatchCheckAllRepCount task that used to be started here is currently disabled
+	// Start the periodic task that checks all storages
tickExecutor.Start(tickevent.NewBatchCheckAllStorage(), interval, tickevent.StartOption{RandomStartDelayMs: 60 * 1000})
+	// Start the periodic task that checks agent state (interval written as a literal, also 5 minutes)
tickExecutor.Start(tickevent.NewCheckAgentState(), 5*60*1000, tickevent.StartOption{RandomStartDelayMs: 60 * 1000})
+	// Start the periodic task that checks package redundancy
tickExecutor.Start(tickevent.NewBatchCheckPackageRedundancy(), interval, tickevent.StartOption{RandomStartDelayMs: 20 * 60 * 1000})
+	// Start the periodic task that cleans up pinned objects
tickExecutor.Start(tickevent.NewBatchCleanPinned(), interval, tickevent.StartOption{RandomStartDelayMs: 20 * 60 * 1000})
}