From f87ec06af69201233fddc94d43828ce25cfc0be0 Mon Sep 17 00:00:00 2001 From: Samuel N Cui Date: Mon, 12 Dec 2022 22:48:23 +0800 Subject: [PATCH] feat: useable --- .gitignore | 5 + .vscode/settings.json | 7 + apis/api.go | 21 + apis/converts.go | 98 + apis/device_list.go | 11 + apis/file_delete.go | 16 + apis/file_edit.go | 54 + apis/file_get.go | 37 + apis/file_list_parents.go | 16 + apis/file_mkdir.go | 28 + apis/job_create.go | 24 + apis/job_display.go | 21 + apis/job_get_log.go | 32 + apis/job_list.go | 27 + apis/job_next.go | 20 + apis/source_list.go | 88 + apis/tape_get.go | 30 + block.go | 78 - build.sh | 16 + build_linux.sh | 7 + cmd/argvtest/main.go | 11 - cmd/ordercp/main.go | 437 ----- cmd/tape-httpd/config.example.yaml | 19 + cmd/tape-httpd/main.go | 117 ++ cmd/tape-httpd/tape-writer.service | 16 + cmd/tape-import/main.go | 38 + cmd/tape-loadtape/main.go | 89 + cmd/writer/main.go | 47 - entity/copy_status.pb.go | 144 ++ entity/copy_status.proto | 14 + entity/file.pb.go | 274 +++ entity/file.proto | 19 + entity/job.go | 32 + entity/job.pb.go | 782 ++++++++ entity/job.proto | 61 + entity/job_archive.pb.go | 708 +++++++ entity/job_archive.proto | 49 + entity/job_restore.pb.go | 791 ++++++++ entity/job_restore.proto | 56 + entity/position.pb.go | 219 +++ entity/position.proto | 16 + entity/service.pb.go | 2004 ++++++++++++++++++++ entity/service.proto | 145 ++ entity/service_gen.sh | 16 + entity/service_grpc.pb.go | 537 ++++++ entity/source.go | 72 + entity/source.pb.go | 362 ++++ entity/source.proto | 28 + entity/tape.pb.go | 214 +++ entity/tape.proto | 15 + entity/utils.go | 73 + executor/executor.go | 131 ++ executor/job.go | 132 ++ executor/job_archive_display.go | 22 + executor/job_archive_exe.go | 357 ++++ executor/job_archive_param.go | 74 + executor/job_archive_start.go | 15 + executor/job_restore.go | 59 + executor/key.go | 54 + executor/log.go | 50 + executor/progress.go | 8 + external/external.go | 11 + external/from_json.go | 63 + frontend/.gitignore | 24 + frontend/.prettierrc.json | 3 + frontend/index.html | 16 + frontend/package.json | 46 + frontend/pnpm-lock.yaml | 2495 +++++++++++++++++++++++++ frontend/public/vite.svg | 1 + frontend/src/actions.ts | 26 + frontend/src/api.ts | 176 ++ frontend/src/app.less | 63 + frontend/src/app.tsx | 76 + frontend/src/assets/react.svg | 1 + frontend/src/backup.tsx | 327 ++++ frontend/src/detail.less | 23 + frontend/src/detail.tsx | 186 ++ frontend/src/entity/copy_status.ts | 34 + frontend/src/entity/file.ts | 202 ++ frontend/src/entity/gen_index.sh | 12 + frontend/src/entity/index.ts | 11 + frontend/src/entity/job.ts | 574 ++++++ frontend/src/entity/job_archive.ts | 498 +++++ frontend/src/entity/job_restore.ts | 583 ++++++ frontend/src/entity/position.ts | 157 ++ frontend/src/entity/service.client.ts | 193 ++ frontend/src/entity/service.ts | 1643 ++++++++++++++++ frontend/src/entity/source.ts | 281 +++ frontend/src/entity/tape.ts | 146 ++ frontend/src/file.tsx | 184 ++ frontend/src/index.css | 41 + frontend/src/init.tsx | 31 + frontend/src/jobs.tsx | 471 +++++ frontend/src/main.tsx | 10 + frontend/src/tools.ts | 15 + frontend/src/vite-env.d.ts | 1 + frontend/tsconfig.json | 21 + frontend/tsconfig.node.json | 9 + frontend/vite.config.ts | 15 + go.mod | 52 +- go.sum | 600 +++++- library/file.go | 477 ++++- library/library.go | 15 +- library/position.go | 51 +- library/tape.go | 116 +- maketape | 15 - mmap/manual_test_program.go | 56 - mmap/mmap_darwin.go | 136 -- mmap/mmap_linux.go | 145 -- mmap/mmap_other.go | 86 - mmap/mmap_test.go 
| 34 - mmap/mmap_windows.go | 141 -- resource/db.go | 1 - scripts/encrypt | 10 + encrypttape => scripts/encrypttape | 0 scripts/maketape | 45 + scripts/mkfs | 6 + scripts/mount | 6 + scripts/mounttape | 10 + test.sh => scripts/test.sh | 2 +- scripts/umount | 6 + sg_tools.go | 1 - consts.go => sgtape/consts.go | 2 +- sgtape/page.go | 115 ++ sgtape/sg_tools.go | 1 + tools.go => sgtape/tools.go | 2 +- writer.go => sgtape/writer.go.bak | 146 +- tapechanger/changer.go | 24 + tools/command.go | 25 + tools/filesystem.go | 28 + tools/filesystem_test.go | 16 + tools/pprof.go | 32 + tools/recover.go | 30 + tools/strings.go | 14 + 134 files changed, 18715 insertions(+), 1343 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 apis/api.go create mode 100644 apis/converts.go create mode 100644 apis/device_list.go create mode 100644 apis/file_delete.go create mode 100644 apis/file_edit.go create mode 100644 apis/file_get.go create mode 100644 apis/file_list_parents.go create mode 100644 apis/file_mkdir.go create mode 100644 apis/job_create.go create mode 100644 apis/job_display.go create mode 100644 apis/job_get_log.go create mode 100644 apis/job_list.go create mode 100644 apis/job_next.go create mode 100644 apis/source_list.go create mode 100644 apis/tape_get.go delete mode 100644 block.go create mode 100755 build.sh create mode 100755 build_linux.sh delete mode 100644 cmd/argvtest/main.go delete mode 100644 cmd/ordercp/main.go create mode 100644 cmd/tape-httpd/config.example.yaml create mode 100644 cmd/tape-httpd/main.go create mode 100644 cmd/tape-httpd/tape-writer.service create mode 100644 cmd/tape-import/main.go create mode 100644 cmd/tape-loadtape/main.go delete mode 100644 cmd/writer/main.go create mode 100644 entity/copy_status.pb.go create mode 100644 entity/copy_status.proto create mode 100644 entity/file.pb.go create mode 100644 entity/file.proto create mode 100644 entity/job.go create mode 100644 entity/job.pb.go create mode 100644 entity/job.proto create mode 100644 entity/job_archive.pb.go create mode 100644 entity/job_archive.proto create mode 100644 entity/job_restore.pb.go create mode 100644 entity/job_restore.proto create mode 100644 entity/position.pb.go create mode 100644 entity/position.proto create mode 100644 entity/service.pb.go create mode 100644 entity/service.proto create mode 100755 entity/service_gen.sh create mode 100644 entity/service_grpc.pb.go create mode 100644 entity/source.go create mode 100644 entity/source.pb.go create mode 100644 entity/source.proto create mode 100644 entity/tape.pb.go create mode 100644 entity/tape.proto create mode 100644 entity/utils.go create mode 100644 executor/executor.go create mode 100644 executor/job.go create mode 100644 executor/job_archive_display.go create mode 100644 executor/job_archive_exe.go create mode 100644 executor/job_archive_param.go create mode 100644 executor/job_archive_start.go create mode 100644 executor/job_restore.go create mode 100644 executor/key.go create mode 100644 executor/log.go create mode 100644 executor/progress.go create mode 100644 external/external.go create mode 100644 external/from_json.go create mode 100644 frontend/.gitignore create mode 100644 frontend/.prettierrc.json create mode 100644 frontend/index.html create mode 100644 frontend/package.json create mode 100644 frontend/pnpm-lock.yaml create mode 100644 frontend/public/vite.svg create mode 100644 frontend/src/actions.ts create mode 100644 frontend/src/api.ts create mode 100644 frontend/src/app.less create mode 100644 
frontend/src/app.tsx create mode 100644 frontend/src/assets/react.svg create mode 100644 frontend/src/backup.tsx create mode 100644 frontend/src/detail.less create mode 100644 frontend/src/detail.tsx create mode 100644 frontend/src/entity/copy_status.ts create mode 100644 frontend/src/entity/file.ts create mode 100755 frontend/src/entity/gen_index.sh create mode 100644 frontend/src/entity/index.ts create mode 100644 frontend/src/entity/job.ts create mode 100644 frontend/src/entity/job_archive.ts create mode 100644 frontend/src/entity/job_restore.ts create mode 100644 frontend/src/entity/position.ts create mode 100644 frontend/src/entity/service.client.ts create mode 100644 frontend/src/entity/service.ts create mode 100644 frontend/src/entity/source.ts create mode 100644 frontend/src/entity/tape.ts create mode 100644 frontend/src/file.tsx create mode 100644 frontend/src/index.css create mode 100644 frontend/src/init.tsx create mode 100644 frontend/src/jobs.tsx create mode 100644 frontend/src/main.tsx create mode 100644 frontend/src/tools.ts create mode 100644 frontend/src/vite-env.d.ts create mode 100644 frontend/tsconfig.json create mode 100644 frontend/tsconfig.node.json create mode 100644 frontend/vite.config.ts delete mode 100755 maketape delete mode 100644 mmap/manual_test_program.go delete mode 100644 mmap/mmap_darwin.go delete mode 100644 mmap/mmap_linux.go delete mode 100644 mmap/mmap_other.go delete mode 100644 mmap/mmap_test.go delete mode 100644 mmap/mmap_windows.go create mode 100755 scripts/encrypt rename encrypttape => scripts/encrypttape (100%) create mode 100755 scripts/maketape create mode 100755 scripts/mkfs create mode 100755 scripts/mount create mode 100755 scripts/mounttape rename test.sh => scripts/test.sh (61%) create mode 100755 scripts/umount delete mode 100644 sg_tools.go rename consts.go => sgtape/consts.go (99%) create mode 100644 sgtape/page.go create mode 100644 sgtape/sg_tools.go rename tools.go => sgtape/tools.go (97%) rename writer.go => sgtape/writer.go.bak (75%) create mode 100644 tapechanger/changer.go create mode 100644 tools/command.go create mode 100644 tools/filesystem.go create mode 100644 tools/filesystem_test.go create mode 100644 tools/pprof.go create mode 100644 tools/recover.go create mode 100644 tools/strings.go diff --git a/.gitignore b/.gitignore index d5a7e4b..2beda93 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,8 @@ # Dependency directories (remove the comment below to include it) # vendor/ output/ +frontend/node_modules/ +client/node_modules/ + +tapes.db +upload_test.sh diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..e7bc266 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "protoc": { + "options": [ + "--proto_path=./entity" + ] + } +} \ No newline at end of file diff --git a/apis/api.go b/apis/api.go new file mode 100644 index 0000000..229fd68 --- /dev/null +++ b/apis/api.go @@ -0,0 +1,21 @@ +package apis + +import ( + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/executor" + "github.com/abc950309/tapewriter/library" +) + +// JobGet(context.Context, *entity.JobGetRequest) (*entity.JobGetReply, error) + +type API struct { + entity.UnimplementedServiceServer + + lib *library.Library + exe *executor.Executor + sourceBase string +} + +func New(base string, lib *library.Library, exe *executor.Executor) *API { + return &API{lib: lib, exe: exe, sourceBase: base} +} diff --git a/apis/converts.go b/apis/converts.go new file mode 100644 index 
0000000..fb57fe3 --- /dev/null +++ b/apis/converts.go @@ -0,0 +1,98 @@ +package apis + +import ( + "io/fs" + "path" + "path/filepath" + "time" + + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/executor" + "github.com/abc950309/tapewriter/library" +) + +func convertFiles(files ...*library.File) []*entity.File { + results := make([]*entity.File, 0, len(files)) + for _, f := range files { + results = append(results, &entity.File{ + Id: f.ID, + ParentId: f.ParentID, + Name: f.Name, + Mode: int64(f.Mode), + ModTime: f.ModTime.Unix(), + Size: f.Size, + Hash: f.Hash, + }) + } + return results +} + +func convertPositions(positions ...*library.Position) []*entity.Position { + results := make([]*entity.Position, 0, len(positions)) + for _, p := range positions { + results = append(results, &entity.Position{ + Id: p.ID, + FileId: p.FileID, + TapeId: p.TapeID, + Path: p.Path, + Mode: int64(p.Mode), + ModTime: p.ModTime.Unix(), + WriteTime: p.WriteTime.Unix(), + Size: p.Size, + Hash: p.Hash, + }) + } + return results +} + +func convertSourceFiles(parent string, files ...fs.FileInfo) []*entity.SourceFile { + results := make([]*entity.SourceFile, 0, len(files)) + for _, f := range files { + if !f.Mode().IsDir() && !f.Mode().IsRegular() { + continue + } + + _, file := path.Split(f.Name()) + results = append(results, &entity.SourceFile{ + Path: filepath.Join(parent, file), + ParentPath: parent, + Name: file, + Mode: int64(f.Mode()), + ModTime: f.ModTime().Unix(), + Size: f.Size(), + }) + } + return results +} + +func convertJobs(jobs ...*executor.Job) []*entity.Job { + converted := make([]*entity.Job, 0, len(jobs)) + for _, job := range jobs { + converted = append(converted, &entity.Job{ + Id: job.ID, + Status: job.Status, + Priority: job.Priority, + CreateTime: job.CreateTime.Unix(), + UpdateTime: job.UpdateTime.Unix(), + State: job.State, + }) + } + return converted +} + +func convertOptionalTime(t *time.Time) *int64 { + if t == nil { + return nil + } + + u := t.Unix() + return &u +} + +func map2list[K, T comparable](mapping map[K]T) []T { + result := make([]T, 0, len(mapping)) + for _, v := range mapping { + result = append(result, v) + } + return result +} diff --git a/apis/device_list.go b/apis/device_list.go new file mode 100644 index 0000000..b8b8fe9 --- /dev/null +++ b/apis/device_list.go @@ -0,0 +1,11 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) DeviceList(ctx context.Context, req *entity.DeviceListRequest) (*entity.DeviceListReply, error) { + return &entity.DeviceListReply{Devices: api.exe.ListAvailableDevices()}, nil +} diff --git a/apis/file_delete.go b/apis/file_delete.go new file mode 100644 index 0000000..a7c3a7a --- /dev/null +++ b/apis/file_delete.go @@ -0,0 +1,16 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" + mapset "github.com/deckarep/golang-set/v2" +) + +func (api *API) FileDelete(ctx context.Context, req *entity.FileDeleteRequest) (*entity.FileDeleteReply, error) { + ids := mapset.NewThreadUnsafeSet(req.Ids...) 
+ if err := api.lib.Delete(ctx, ids.ToSlice()); err != nil { + return nil, err + } + return new(entity.FileDeleteReply), nil +} diff --git a/apis/file_edit.go b/apis/file_edit.go new file mode 100644 index 0000000..eca8972 --- /dev/null +++ b/apis/file_edit.go @@ -0,0 +1,54 @@ +package apis + +import ( + "context" + "fmt" + "io/fs" + "path" + "strings" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) FileEdit(ctx context.Context, req *entity.FileEditRequest) (*entity.FileEditReply, error) { + file, err := api.lib.GetFile(ctx, req.Id) + if err != nil { + return nil, err + } + if file == nil { + return nil, fmt.Errorf("file not found, id= %d", req.Id) + } + + if req.File.ParentId != nil { + file.ParentID = *req.File.ParentId + } + if req.File.Name != nil { + name := strings.TrimSpace(*req.File.Name) + if name == "" { + return nil, fmt.Errorf("unexpected target name, empty string") + } + + if !strings.ContainsAny(name, `\/`) { + file.Name = name + } else { + name = path.Clean(strings.ReplaceAll(name, `\`, `/`)) + + dirname, filename := path.Split(name) + if filename == "" { + return nil, fmt.Errorf("unexpected target name, end with slash, '%s'", name) + } + + dir, err := api.lib.MkdirAll(ctx, file.ParentID, dirname, fs.ModePerm) + if err != nil { + return nil, err + } + + file.ParentID = dir.ID + } + } + + if err := api.lib.MoveFile(ctx, file); err != nil { + return nil, err + } + return &entity.FileEditReply{File: convertFiles(file)[0]}, nil +} diff --git a/apis/file_get.go b/apis/file_get.go new file mode 100644 index 0000000..43b5447 --- /dev/null +++ b/apis/file_get.go @@ -0,0 +1,37 @@ +package apis + +import ( + "context" + "errors" + + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/library" +) + +func (api *API) FileGet(ctx context.Context, req *entity.FileGetRequest) (*entity.FileGetReply, error) { + libFile, err := api.lib.GetFile(ctx, req.Id) + if err != nil && !errors.Is(err, library.ErrFileNotFound) { + return nil, err + } + + var file *entity.File + if libFile != nil { + file = convertFiles(libFile)[0] + } + + positions, err := api.lib.GetPositionByFileID(ctx, req.Id) + if err != nil { + return nil, err + } + + children, err := api.lib.List(ctx, req.Id) + if err != nil { + return nil, err + } + + return &entity.FileGetReply{ + File: file, + Positions: convertPositions(positions...), + Children: convertFiles(children...), + }, nil +} diff --git a/apis/file_list_parents.go b/apis/file_list_parents.go new file mode 100644 index 0000000..8307e1b --- /dev/null +++ b/apis/file_list_parents.go @@ -0,0 +1,16 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) FileListParents(ctx context.Context, req *entity.FileListParentsRequest) (*entity.FileListParentsReply, error) { + files, err := api.lib.ListParents(ctx, req.Id) + if err != nil { + return nil, err + } + + return &entity.FileListParentsReply{Parents: convertFiles(files...)}, nil +} diff --git a/apis/file_mkdir.go b/apis/file_mkdir.go new file mode 100644 index 0000000..d4a518e --- /dev/null +++ b/apis/file_mkdir.go @@ -0,0 +1,28 @@ +package apis + +import ( + "context" + "fmt" + "io/fs" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) FileMkdir(ctx context.Context, req *entity.FileMkdirRequest) (*entity.FileMkdirReply, error) { + if req.ParentId != 0 { + parent, err := api.lib.GetFile(ctx, req.ParentId) + if err != nil { + return nil, err + } + if parent == nil { + return nil, fmt.Errorf("file not 
found, id= %d", req.ParentId) + } + } + + dir, err := api.lib.MkdirAll(ctx, req.ParentId, req.Path, fs.ModePerm) + if err != nil { + return nil, err + } + + return &entity.FileMkdirReply{File: convertFiles(dir)[0]}, nil +} diff --git a/apis/job_create.go b/apis/job_create.go new file mode 100644 index 0000000..6d1576e --- /dev/null +++ b/apis/job_create.go @@ -0,0 +1,24 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/executor" +) + +func (api *API) JobCreate(ctx context.Context, req *entity.JobCreateRequest) (*entity.JobCreateReply, error) { + job, err := api.exe.CreateJob(ctx, &executor.Job{ + Status: entity.JobStatus_Pending, + Priority: req.Job.Priority, + }, req.Job.Param) + if err != nil { + return nil, err + } + + if err := api.exe.Start(ctx, job); err != nil { + return nil, err + } + + return &entity.JobCreateReply{Job: convertJobs(job)[0]}, nil +} diff --git a/apis/job_display.go b/apis/job_display.go new file mode 100644 index 0000000..eeac207 --- /dev/null +++ b/apis/job_display.go @@ -0,0 +1,21 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) JobDisplay(ctx context.Context, req *entity.JobDisplayRequest) (*entity.JobDisplayReply, error) { + job, err := api.exe.GetJob(ctx, req.Id) + if err != nil { + return &entity.JobDisplayReply{}, nil + } + + result, err := api.exe.Display(ctx, job) + if err != nil { + return &entity.JobDisplayReply{}, nil + } + + return &entity.JobDisplayReply{Display: result}, nil +} diff --git a/apis/job_get_log.go b/apis/job_get_log.go new file mode 100644 index 0000000..d397125 --- /dev/null +++ b/apis/job_get_log.go @@ -0,0 +1,32 @@ +package apis + +import ( + "context" + "fmt" + "io" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) JobGetLog(ctx context.Context, req *entity.JobGetLogRequest) (*entity.JobGetLogReply, error) { + reader, err := api.exe.NewLogReader(req.JobId) + if err != nil { + return nil, fmt.Errorf("open log fail, %w", err) + } + if reader == nil { + return &entity.JobGetLogReply{Logs: []byte{}}, nil + } + + if req.Offset > 0 { + if _, err := reader.Seek(req.Offset, 0); err != nil { + return nil, fmt.Errorf("seek log file fail, offset= %d, %w", req.Offset, err) + } + } + + buf, err := io.ReadAll(reader) + if err != nil { + return nil, fmt.Errorf("read log fail, %w", err) + } + + return &entity.JobGetLogReply{Logs: buf}, nil +} diff --git a/apis/job_list.go b/apis/job_list.go new file mode 100644 index 0000000..8340fae --- /dev/null +++ b/apis/job_list.go @@ -0,0 +1,27 @@ +package apis + +import ( + "context" + "fmt" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) JobList(ctx context.Context, req *entity.JobListRequest) (*entity.JobListReply, error) { + switch param := req.Param.(type) { + case *entity.JobListRequest_Mget: + jobs, err := api.exe.MGetJob(ctx, param.Mget.Ids...) 
+ if err != nil { + return nil, err + } + return &entity.JobListReply{Jobs: convertJobs(map2list(jobs)...)}, nil + case *entity.JobListRequest_List: + jobs, err := api.exe.ListJob(ctx, param.List) + if err != nil { + return nil, err + } + return &entity.JobListReply{Jobs: convertJobs(jobs...)}, nil + default: + return nil, fmt.Errorf("unexpected param, %T", req.Param) + } +} diff --git a/apis/job_next.go b/apis/job_next.go new file mode 100644 index 0000000..83d0ae1 --- /dev/null +++ b/apis/job_next.go @@ -0,0 +1,20 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) JobNext(ctx context.Context, req *entity.JobNextRequest) (*entity.JobNextReply, error) { + job, err := api.exe.GetJob(ctx, req.Id) + if err != nil { + return nil, err + } + + if err := api.exe.Submit(ctx, job, req.Param); err != nil { + return nil, err + } + + return &entity.JobNextReply{Job: convertJobs(job)[0]}, nil +} diff --git a/apis/source_list.go b/apis/source_list.go new file mode 100644 index 0000000..8d36c6f --- /dev/null +++ b/apis/source_list.go @@ -0,0 +1,88 @@ +package apis + +import ( + "context" + "fmt" + "io/fs" + "os" + "path" + "strings" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) SourceList(ctx context.Context, req *entity.SourceListRequest) (*entity.SourceListReply, error) { + if req.Path == "./" { + req.Path = "" + } + + parts := strings.Split(req.Path, "/") + filteredParts := make([]string, 1, len(parts)+1) + filteredParts[0] = "" + for _, part := range parts { + if part == "" { + continue + } + filteredParts = append(filteredParts, part) + } + + // buf, _ := json.Marshal(filteredParts) + // logrus.WithContext(ctx).Infof("parts= %s", buf) + + current := "" + chain := make([]*entity.SourceFile, 0, len(filteredParts)) + for _, part := range filteredParts { + p := path.Join(api.sourceBase, current, part) + + stat, err := os.Stat(p) + if err != nil { + return nil, err + } + + files := convertSourceFiles(current, stat) + if len(files) == 0 { + return nil, fmt.Errorf("unexpected file, %s", current+part) + } + + file := files[0] + chain = append(chain, file) + + if !fs.FileMode(file.Mode).IsDir() { + break + } + + current = path.Join(current, part) + } + if len(chain) == 0 { + return nil, fmt.Errorf("unexpected file, '%s'", req.Path) + } + + chain[0].Path = "./" + chain[0].Name = "Root" + file := chain[len(chain)-1] + reply := &entity.SourceListReply{ + File: file, + Chain: chain, + } + if !fs.FileMode(file.Mode).IsDir() { + return reply, nil + } + + dir := path.Join(api.sourceBase, req.Path) + children, err := os.ReadDir(dir) + if err != nil { + return nil, err + } + + infos := make([]fs.FileInfo, 0, len(children)) + for _, child := range children { + info, err := child.Info() + if err != nil { + return nil, err + } + infos = append(infos, info) + } + + reply.Children = convertSourceFiles(req.Path, infos...) + return reply, nil +} diff --git a/apis/tape_get.go b/apis/tape_get.go new file mode 100644 index 0000000..f3a3fbf --- /dev/null +++ b/apis/tape_get.go @@ -0,0 +1,30 @@ +package apis + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (api *API) TapeMGet(ctx context.Context, req *entity.TapeMGetRequest) (*entity.TapeMGetReply, error) { + tapes, err := api.lib.MGetTape(ctx, req.Ids...) 
+ if err != nil { + return nil, err + } + + converted := make([]*entity.Tape, 0, len(tapes)) + for _, tape := range tapes { + converted = append(converted, &entity.Tape{ + Id: tape.ID, + Barcode: tape.Barcode, + Name: tape.Name, + Encryption: tape.Encryption, + CreateTime: tape.CreateTime.Unix(), + DestroyTime: convertOptionalTime(tape.DestroyTime), + CapacityBytes: tape.CapacityBytes, + WritenBytes: tape.WritenBytes, + }) + } + + return &entity.TapeMGetReply{Tapes: converted}, nil +} diff --git a/block.go b/block.go deleted file mode 100644 index cc46c71..0000000 --- a/block.go +++ /dev/null @@ -1,78 +0,0 @@ -package tapewriter - -import ( - "io" - "os" - "sync" - "syscall" -) - -var ( - _ = io.WriteCloser(new(BlockWriter)) -) - -type BlockWriter struct { - target uintptr - blockSize int - buffer chan []byte - pool sync.Pool - closed sync.WaitGroup - - current []byte - off int -} - -func NewBlockWriter(tape *os.File, blockSize, bufferBlocks int) *BlockWriter { - w := &BlockWriter{ - target: tape.Fd(), - blockSize: blockSize, - buffer: make(chan []byte, bufferBlocks), - current: make([]byte, blockSize), - pool: sync.Pool{New: func() interface{} { return make([]byte, blockSize) }}, - } - - w.closed.Add(1) - go w.loop() - return w -} - -func (w *BlockWriter) Write(buf []byte) (int, error) { - var n, cn int - for len(buf) > 0 { - cn = copy(w.current, buf) - buf = buf[cn:] - w.off += cn - n += cn - - if w.off >= w.blockSize { - w.buffer <- w.current - w.current = w.pool.Get().([]byte) - } - } - - return n, nil -} - -func (w *BlockWriter) Close() error { - w.buffer <- w.current[:w.off] - close(w.buffer) - - w.closed.Wait() - return nil -} - -func (w *BlockWriter) loop() { - defer w.closed.Done() - - for { - buf, ok := <-w.buffer - if !ok { - break - } - - _, err := syscall.Write(int(w.target), buf) - if err != nil { - panic(err) - } - } -} diff --git a/build.sh b/build.sh new file mode 100755 index 0000000..ce116ed --- /dev/null +++ b/build.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -e; + +CURDIR=$(cd $(dirname $0); pwd); +cd ${CURDIR}; + +rm -rf output; +mkdir -p output; +go build -o ./output/httpd ./cmd/tape-httpd; +go build -o ./output/loadtape ./cmd/tape-loadtape; +go build -o ./output/import ./cmd/tape-import; + +cp -r scripts ./output/; +cp -r ./frontend/dist ./output/frontend; +cp ./cmd/tape-httpd/tape-writer.service ./output/ +cp ./cmd/tape-httpd/config.example.yaml ./output/ diff --git a/build_linux.sh b/build_linux.sh new file mode 100755 index 0000000..c50cd05 --- /dev/null +++ b/build_linux.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -e; + +CURDIR=$(cd $(dirname $0); pwd); +cd ${CURDIR}; + +docker run --rm -v $(pwd):/app golang:1.19 sh -c "cd /app && bash build.sh" diff --git a/cmd/argvtest/main.go b/cmd/argvtest/main.go deleted file mode 100644 index f5f563d..0000000 --- a/cmd/argvtest/main.go +++ /dev/null @@ -1,11 +0,0 @@ -package main - -import ( - "os" - - "github.com/davecgh/go-spew/spew" -) - -func main() { - spew.Dump(os.Args) -} diff --git a/cmd/ordercp/main.go b/cmd/ordercp/main.go deleted file mode 100644 index a779006..0000000 --- a/cmd/ordercp/main.go +++ /dev/null @@ -1,437 +0,0 @@ -package main - -import ( - "context" - "encoding/json" - "fmt" - "hash" - "io" - "os" - "os/signal" - "strings" - "sync" - "sync/atomic" - "time" - - "github.com/abc950309/tapewriter/library" - "github.com/abc950309/tapewriter/mmap" - "github.com/minio/sha256-simd" - "github.com/schollz/progressbar/v3" - "github.com/sirupsen/logrus" -) - -const ( - unexpectFileMode = os.ModeType &^ 
os.ModeDir - batchSize = 1024 * 1024 -) - -var ( - shaPool = &sync.Pool{New: func() interface{} { return sha256.New() }} -) - -func main() { - src, dst := os.Args[1], os.Args[2] - c, err := NewCopyer(dst, src) - if err != nil { - panic(err) - } - c.Run() - - if p := os.Getenv("ORDERCP_REPORT_PATH"); p != "" { - errs := make([]string, 0, len(c.errs)) - for _, e := range c.errs { - errs = append(errs, e.Error()) - } - report, _ := json.Marshal(map[string]interface{}{"errors": errs, "files": c.results}) - - n := os.Getenv("ORDERCP_REPORT_FILENAME") - if n == "" { - n = time.Now().Format("2006-01-02T15:04:05.999999.csv") - } - - r, err := os.Create(fmt.Sprintf("%s/%s", p, n)) - if err != nil { - logrus.Warnf("open report fail, path= '%s', err= %w", fmt.Sprintf("%s/%s", p, n), err) - logrus.Infof("report: %s", report) - return - } - defer r.Close() - - r.Write(report) - } -} - -type Copyer struct { - bar *progressbar.ProgressBar - src []string - dst string - copyed int64 - num int64 - files []*Job - errs []error - copyPipe chan *CopyJob - changePipe chan *Job - - results []*library.TapeFile -} - -func NewCopyer(dst string, src ...string) (*Copyer, error) { - dst = strings.TrimSpace(dst) - if dst == "" { - return nil, fmt.Errorf("dst not found") - } - if dst[len(dst)-1] != '/' { - dst = dst + "/" - } - - filtered := make([]string, 0, len(src)) - for _, s := range src { - s = strings.TrimSpace(s) - if s == "" { - continue - } - - srcStat, err := os.Stat(s) - if err != nil { - return nil, fmt.Errorf("check src path '%s', %w", src, err) - } - if srcStat.IsDir() && s[len(s)-1] != '/' { - s = s + "/" - } - - filtered = append(filtered, s) - } - if len(filtered) == 0 { - return nil, fmt.Errorf("src not found") - } - src = filtered - - dstStat, err := os.Stat(dst) - if err != nil { - return nil, fmt.Errorf("check dst path '%s', %w", dst, err) - } - if !dstStat.IsDir() { - return nil, fmt.Errorf("dst path is not a dir") - } - - c := &Copyer{ - dst: dst, src: src, - copyPipe: make(chan *CopyJob, 32), - changePipe: make(chan *Job, 8), - } - for _, s := range c.src { - c.walk(s, "", true) - } - - var total int64 - for _, file := range c.files { - total += file.Size - } - c.bar = progressbar.DefaultBytes(total) - - return c, nil -} - -func (c *Copyer) Run() { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - signals := make(chan os.Signal, 1) - signal.Notify(signals, os.Interrupt) - go func() { - for sig := range signals { - if sig != os.Interrupt { - continue - } - cancel() - } - }() - - go func() { - ticker := time.NewTicker(time.Millisecond * 500) - defer ticker.Stop() - - last := int64(0) - for range ticker.C { - current := atomic.LoadInt64(&c.copyed) - c.bar.Add(int(current - last)) - last = current - - select { - case <-ctx.Done(): - close(c.copyPipe) - return - default: - } - } - }() - - go func() { - for _, file := range c.files { - c.prepare(ctx, file) - - select { - case <-ctx.Done(): - close(c.copyPipe) - return - default: - } - } - close(c.copyPipe) - }() - - go func() { - for copyer := range c.copyPipe { - hash, err := c.copy(ctx, copyer) - if err != nil { - c.ReportError(c.dst+copyer.Path, err) - if err := os.Remove(c.dst + copyer.Path); err != nil { - c.ReportError(c.dst+copyer.Path, fmt.Errorf("delete file with error fail, %w", err)) - } - } else { - if !copyer.Mode.IsDir() { - c.results = append(c.results, &library.TapeFile{ - Path: copyer.Path, - Size: copyer.Size, - Mode: copyer.Mode, - ModTime: copyer.ModTime, - WriteTime: time.Now(), - Hash: hash, - }) - } - 
} - - select { - case <-ctx.Done(): - close(c.changePipe) - return - default: - } - } - close(c.changePipe) - }() - - for file := range c.changePipe { - c.changeInfo(file) - } -} - -func (c *Copyer) ReportError(file string, err error) { - logrus.Errorf("'%s', %s", file, err) - c.errs = append(c.errs, fmt.Errorf("'%s': %w", file, err)) -} - -func (c *Copyer) walk(src, path string, first bool) { - name := src + path - - stat, err := os.Stat(name) - if err != nil { - c.ReportError(name, fmt.Errorf("walk get stat, %w", err)) - return - } - - job := NewJobFromFileInfo(src, path, stat) - if job.Mode&unexpectFileMode != 0 { - return - } - - if !job.Mode.IsDir() { - c.num++ - job.Number = c.num - c.files = append(c.files, job) - return - } - if first { - files, err := os.ReadDir(name) - if err != nil { - c.ReportError(name, fmt.Errorf("walk read dir, %w", err)) - return - } - - for _, file := range files { - c.walk(src, file.Name(), false) - } - return - } - - enterJob := new(Job) - *enterJob = *job - enterJob.Type = JobTypeEnterDir - c.files = append(c.files, enterJob) - - files, err := os.ReadDir(name) - if err != nil { - c.ReportError(name, fmt.Errorf("walk read dir, %w", err)) - return - } - - for _, file := range files { - c.walk(src, path+"/"+file.Name(), false) - } - - exitJob := new(Job) - *exitJob = *job - exitJob.Type = JobTypeExitDir - c.files = append(c.files, exitJob) -} - -func (c *Copyer) prepare(ctx context.Context, job *Job) { - switch job.Type { - case JobTypeEnterDir: - name := c.dst + job.Path - err := os.Mkdir(name, job.Mode&os.ModePerm) - if err != nil { - c.ReportError(name, fmt.Errorf("mkdir fail, %w", err)) - return - } - return - case JobTypeExitDir: - c.copyPipe <- &CopyJob{Job: job} - return - } - - name := job.Source + job.Path - file, err := mmap.Open(name) - if err != nil { - c.ReportError(name, fmt.Errorf("open src file fail, %w", err)) - return - } - - c.copyPipe <- &CopyJob{Job: job, src: file} -} - -func (c *Copyer) copy(ctx context.Context, job *CopyJob) ([]byte, error) { - if job.src == nil { - c.changePipe <- job.Job - return nil, nil - } - defer job.src.Close() - - name := c.dst + job.Path - file, err := os.Create(name) - if err != nil { - return nil, fmt.Errorf("open dst file fail, %w", err) - } - defer file.Close() - - c.bar.Describe(fmt.Sprintf("[%d/%d]: %s", job.Number, c.num, job.Path)) - hash, err := c.streamCopy(ctx, file, job.src) - if err != nil { - return nil, fmt.Errorf("copy file fail, %w", err) - } - - c.changePipe <- job.Job - return hash, nil -} - -func (c *Copyer) changeInfo(info *Job) { - name := c.dst + info.Path - - if err := os.Chmod(name, info.Mode&os.ModePerm); err != nil { - c.ReportError(name, fmt.Errorf("change info, chmod fail, %w", err)) - } - if err := os.Chtimes(name, info.ModTime, info.ModTime); err != nil { - c.ReportError(name, fmt.Errorf("change info, chtimes fail, %w", err)) - } -} - -func (c *Copyer) streamCopy(ctx context.Context, dst io.Writer, src *mmap.ReaderAt) (h []byte, err error) { - if src.Len() == 0 { - return nil, nil - } - - sha := shaPool.Get().(hash.Hash) - sha.Reset() - defer shaPool.Put(sha) - - var wg sync.WaitGroup - hashChan := make(chan []byte, 4) - defer func() { - close(hashChan) - if err != nil { - return - } - - wg.Wait() - h = sha.Sum(nil) - }() - - wg.Add(1) - go func() { - defer wg.Done() - for buf := range hashChan { - sha.Write(buf) - } - }() - - err = func() error { - for idx := int64(0); ; idx += batchSize { - buf, err := src.Slice(idx, batchSize) - if err != nil { - return fmt.Errorf("slice 
mmap fail, %w", err) - } - nr := len(buf) - hashChan <- buf - - nw, ew := dst.Write(buf) - if nw < 0 || nr < nw { - nw = 0 - if ew == nil { - return fmt.Errorf("write fail, unexpected return, byte_num= %d", nw) - } - return fmt.Errorf("write fail, %w", ew) - } - if nr != nw { - return fmt.Errorf("write fail, write and read bytes not equal, read= %d write= %d", nr, nw) - } - - atomic.AddInt64(&c.copyed, int64(nr)) - if len(buf) < batchSize { - return nil - } - - select { - case <-ctx.Done(): - return ctx.Err() - default: - } - } - }() - return -} - -type JobType uint8 - -const ( - JobTypeNormal = JobType(iota) - JobTypeEnterDir - JobTypeExitDir -) - -type Job struct { - Source string - Path string - Type JobType - Number int64 - Name string // base name of the file - Size int64 // length in bytes for regular files; system-dependent for others - Mode os.FileMode // file mode bits - ModTime time.Time // modification time -} - -func NewJobFromFileInfo(src, path string, info os.FileInfo) *Job { - job := &Job{ - Source: src, - Path: path, - Name: info.Name(), - Size: info.Size(), - Mode: info.Mode(), - ModTime: info.ModTime(), - } - return job -} - -type CopyJob struct { - *Job - src *mmap.ReaderAt -} diff --git a/cmd/tape-httpd/config.example.yaml b/cmd/tape-httpd/config.example.yaml new file mode 100644 index 0000000..2e7b2bb --- /dev/null +++ b/cmd/tape-httpd/config.example.yaml @@ -0,0 +1,19 @@ +domain: http://127.0.0.1:8080 +listen: 127.0.0.1:8080 +debug_listen: 127.0.0.1:8081 +work_directory: ./ + +database: + dialect: sqlite + dsn: ./tapes.db + +tape_devices: + - /dev/nst0 + +filesystem_root: ./ + +scripts: + encrypt: ./scripts/encrypt + mkfs: ./scripts/mkfs + mount: ./scripts/mount + umount: ./scripts/umount diff --git a/cmd/tape-httpd/main.go b/cmd/tape-httpd/main.go new file mode 100644 index 0000000..a52148c --- /dev/null +++ b/cmd/tape-httpd/main.go @@ -0,0 +1,117 @@ +package main + +import ( + "bytes" + "context" + "flag" + "fmt" + "io/ioutil" + "log" + "net/http" + "os" + + "github.com/abc950309/tapewriter/apis" + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/executor" + "github.com/abc950309/tapewriter/library" + "github.com/abc950309/tapewriter/resource" + "github.com/abc950309/tapewriter/tools" + "github.com/improbable-eng/grpc-web/go/grpcweb" + "google.golang.org/grpc" + "gopkg.in/yaml.v2" +) + +type config struct { + Domain string `yaml:"domain"` + Listen string `yaml:"listen"` + DebugListen string `yaml:"debug_listen"` + WorkDirectory string `yaml:"work_directory"` + + Database struct { + Dialect string `yaml:"dialect"` + DSN string `yaml:"dsn"` + } `yaml:"database"` + + TapeDevices []string `yaml:"tape_devices"` + FilesystemRoot string `yaml:"filesystem_root"` + + Scripts struct { + Encrypt string `yaml:"encrypt"` + Mkfs string `yaml:"mkfs"` + Mount string `yaml:"mount"` + Umount string `yaml:"umount"` + } `yaml:"scripts"` +} + +var ( + configPath = flag.String("config", "./config.yaml", "config file path") +) + +func main() { + flag.Parse() + + cf, err := os.Open(*configPath) + if err != nil { + panic(err) + } + + conf := new(config) + if err := yaml.NewDecoder(cf).Decode(conf); err != nil { + panic(err) + } + + if conf.DebugListen != "" { + go tools.Wrap(context.Background(), func() { tools.NewDebugServer(conf.DebugListen) }) + } + + db, err := resource.NewDBConn(conf.Database.Dialect, conf.Database.DSN) + if err != nil { + panic(err) + } + + lib := library.New(db) + if err := lib.AutoMigrate(); err != nil { + panic(err) + } + + exe := 
executor.New( + db, lib, conf.TapeDevices, conf.WorkDirectory, + conf.Scripts.Encrypt, conf.Scripts.Mkfs, conf.Scripts.Mount, conf.Scripts.Umount, + ) + if err := exe.AutoMigrate(); err != nil { + panic(err) + } + + s := grpc.NewServer() + api := apis.New(conf.FilesystemRoot, lib, exe) + entity.RegisterServiceServer(s, api) + + mux := http.NewServeMux() + + grpcWebServer := grpcweb.WrapServer(s, grpcweb.WithOriginFunc(func(origin string) bool { return true })) + mux.Handle("/services/", http.StripPrefix("/services/", grpcWebServer)) + + fs := http.FileServer(http.Dir("./frontend/assets")) + mux.Handle("/assets/", http.StripPrefix("/assets/", fs)) + + indexBuf, err := ioutil.ReadFile("./frontend/index.html") + if err != nil { + panic(err) + } + + indexBuf = bytes.ReplaceAll(indexBuf, []byte("%%API_BASE%%"), []byte(fmt.Sprintf("%s/services", conf.Domain))) + mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + w.Write(indexBuf) + }) + + srv := &http.Server{ + Handler: mux, + Addr: conf.Listen, + } + + log.Printf("http server listening at %v", srv.Addr) + if err := srv.ListenAndServe(); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/cmd/tape-httpd/tape-writer.service b/cmd/tape-httpd/tape-writer.service new file mode 100644 index 0000000..debee56 --- /dev/null +++ b/cmd/tape-httpd/tape-writer.service @@ -0,0 +1,16 @@ +[Unit] +Description=Tape Writer Service +Documentation=https://github.com/abc950309/tapewriter/ +After=network.target + +[Service] +User=root +Type=simple +WorkingDirectory=/opt/tapewriter +ExecStart=/opt/tapewriter/httpd +Restart=always +RestartSec=15 +StartLimitInterval=0 + +[Install] +WantedBy=multi-user.target diff --git a/cmd/tape-import/main.go b/cmd/tape-import/main.go new file mode 100644 index 0000000..a2970ce --- /dev/null +++ b/cmd/tape-import/main.go @@ -0,0 +1,38 @@ +package main + +import ( + "context" + "os" + + "github.com/abc950309/tapewriter/external" + "github.com/abc950309/tapewriter/library" + "github.com/abc950309/tapewriter/resource" +) + +func main() { + ctx := context.Background() + + db, err := resource.NewDBConn("sqlite", "./tapes.db") + if err != nil { + panic(err) + } + + lib := library.New(db) + if err := lib.AutoMigrate(); err != nil { + panic(err) + } + + file := os.Args[1] + barcode := os.Args[2] + name := os.Args[3] + + f, err := os.Open(file) + if err != nil { + panic(err) + } + + ext := external.New(lib) + if err := ext.ImportACPReport(ctx, barcode, name, "file:tape.key", f); err != nil { + panic(err) + } +} diff --git a/cmd/tape-loadtape/main.go b/cmd/tape-loadtape/main.go new file mode 100644 index 0000000..244f6f2 --- /dev/null +++ b/cmd/tape-loadtape/main.go @@ -0,0 +1,89 @@ +package main + +import ( + "context" + "flag" + "fmt" + "os" + + "github.com/abc950309/tapewriter/executor" + "github.com/abc950309/tapewriter/library" + "github.com/abc950309/tapewriter/resource" + "gopkg.in/yaml.v2" +) + +type config struct { + WorkDirectory string `yaml:"work_directory"` + + Database struct { + Dialect string `yaml:"dialect"` + DSN string `yaml:"dsn"` + } `yaml:"database"` + + TapeDevices []string `yaml:"tape_devices"` + FilesystemRoot string `yaml:"filesystem_root"` + + Scripts struct { + Encrypt string `yaml:"encrypt"` + Mkfs string `yaml:"mkfs"` + Mount string `yaml:"mount"` + Umount string `yaml:"umount"` + } `yaml:"scripts"` +} + +var ( + configPath = flag.String("config", "./config.yaml", "config file path") + barcode = 
flag.String("barcode", "", "barcode for tape") + device = flag.String("device", "/dev/nst0", "barcode for tape") +) + +func main() { + flag.Parse() + + if *barcode == "" { + panic("expect barcode") + } + + cf, err := os.Open(*configPath) + if err != nil { + panic(err) + } + + conf := new(config) + if err := yaml.NewDecoder(cf).Decode(conf); err != nil { + panic(err) + } + + db, err := resource.NewDBConn(conf.Database.Dialect, conf.Database.DSN) + if err != nil { + panic(err) + } + + lib := library.New(db) + if err := lib.AutoMigrate(); err != nil { + panic(err) + } + + exe := executor.New( + db, lib, conf.TapeDevices, conf.WorkDirectory, + conf.Scripts.Encrypt, conf.Scripts.Mkfs, conf.Scripts.Mount, conf.Scripts.Umount, + ) + if err := exe.AutoMigrate(); err != nil { + panic(err) + } + + ctx := context.Background() + tapes, err := lib.MGetTapeByBarcode(ctx, *barcode) + if err != nil { + panic(err) + } + + tape := tapes[*barcode] + if tape == nil { + panic(fmt.Errorf("tape not found, barcode= %s", *barcode)) + } + + if err := exe.RestoreLoadTape(ctx, *device, tape); err != nil { + panic(err) + } +} diff --git a/cmd/writer/main.go b/cmd/writer/main.go deleted file mode 100644 index b850971..0000000 --- a/cmd/writer/main.go +++ /dev/null @@ -1,47 +0,0 @@ -package main - -import ( - "archive/tar" - "fmt" - "io" - "os" - - "github.com/abc950309/tapewriter" -) - -func main() { - f, err := os.OpenFile("/dev/st0", os.O_WRONLY, 0666) - if err != nil { - panic(err) - } - - w, err := tapewriter.NewWriter(f) - if err != nil { - panic(err) - } - - path := os.Args[1] - info, err := os.Stat(path) - if err != nil { - panic(err) - } - - target, err := os.Open(path) - if err != nil { - panic(err) - } - - w.WriteHeader(&tar.Header{ - Name: info.Name(), - Size: info.Size(), - }) - - // syscall.Write() - - written, err := io.Copy(w, target) - if err != nil { - panic(err) - } - - fmt.Println(written) -} diff --git a/entity/copy_status.pb.go b/entity/copy_status.pb.go new file mode 100644 index 0000000..cc7f2a3 --- /dev/null +++ b/entity/copy_status.pb.go @@ -0,0 +1,144 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: copy_status.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CopyStatus int32 + +const ( + CopyStatus_Draft CopyStatus = 0 + CopyStatus_Pending CopyStatus = 1 // waiting in queue + CopyStatus_Running CopyStatus = 2 + CopyStatus_Staged CopyStatus = 3 + CopyStatus_Submited CopyStatus = 4 + CopyStatus_Failed CopyStatus = 255 +) + +// Enum value maps for CopyStatus. 
+var ( + CopyStatus_name = map[int32]string{ + 0: "Draft", + 1: "Pending", + 2: "Running", + 3: "Staged", + 4: "Submited", + 255: "Failed", + } + CopyStatus_value = map[string]int32{ + "Draft": 0, + "Pending": 1, + "Running": 2, + "Staged": 3, + "Submited": 4, + "Failed": 255, + } +) + +func (x CopyStatus) Enum() *CopyStatus { + p := new(CopyStatus) + *p = x + return p +} + +func (x CopyStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CopyStatus) Descriptor() protoreflect.EnumDescriptor { + return file_copy_status_proto_enumTypes[0].Descriptor() +} + +func (CopyStatus) Type() protoreflect.EnumType { + return &file_copy_status_proto_enumTypes[0] +} + +func (x CopyStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CopyStatus.Descriptor instead. +func (CopyStatus) EnumDescriptor() ([]byte, []int) { + return file_copy_status_proto_rawDescGZIP(), []int{0} +} + +var File_copy_status_proto protoreflect.FileDescriptor + +var file_copy_status_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x2a, 0x58, 0x0a, 0x0a, 0x43, 0x6f, 0x70, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x09, + 0x0a, 0x05, 0x44, 0x72, 0x61, 0x66, 0x74, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65, 0x6e, + 0x64, 0x69, 0x6e, 0x67, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e, + 0x67, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x67, 0x65, 0x64, 0x10, 0x03, 0x12, + 0x0c, 0x0a, 0x08, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x65, 0x64, 0x10, 0x04, 0x12, 0x0b, 0x0a, + 0x06, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, + 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, + 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_copy_status_proto_rawDescOnce sync.Once + file_copy_status_proto_rawDescData = file_copy_status_proto_rawDesc +) + +func file_copy_status_proto_rawDescGZIP() []byte { + file_copy_status_proto_rawDescOnce.Do(func() { + file_copy_status_proto_rawDescData = protoimpl.X.CompressGZIP(file_copy_status_proto_rawDescData) + }) + return file_copy_status_proto_rawDescData +} + +var file_copy_status_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_copy_status_proto_goTypes = []interface{}{ + (CopyStatus)(0), // 0: copy_status.CopyStatus +} +var file_copy_status_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_copy_status_proto_init() } +func file_copy_status_proto_init() { + if File_copy_status_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_copy_status_proto_rawDesc, + NumEnums: 1, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_copy_status_proto_goTypes, + DependencyIndexes: file_copy_status_proto_depIdxs, + EnumInfos: file_copy_status_proto_enumTypes, 
+ }.Build() + File_copy_status_proto = out.File + file_copy_status_proto_rawDesc = nil + file_copy_status_proto_goTypes = nil + file_copy_status_proto_depIdxs = nil +} diff --git a/entity/copy_status.proto b/entity/copy_status.proto new file mode 100644 index 0000000..5c55167 --- /dev/null +++ b/entity/copy_status.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +package copy_status; + +option go_package = "github.com/abc950309/tapewriter/entity"; + +enum CopyStatus { + Draft = 0; + Pending = 1; // waiting in queue + Running = 2; + Staged = 3; + Submited = 4; + + Failed = 255; +} diff --git a/entity/file.pb.go b/entity/file.pb.go new file mode 100644 index 0000000..420b095 --- /dev/null +++ b/entity/file.pb.go @@ -0,0 +1,274 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: file.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type File struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + ParentId int64 `protobuf:"varint,2,opt,name=parent_id,json=parentId,proto3" json:"parent_id,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"` + ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"` + Size int64 `protobuf:"varint,19,opt,name=size,proto3" json:"size,omitempty"` + Hash []byte `protobuf:"bytes,20,opt,name=hash,proto3" json:"hash,omitempty"` +} + +func (x *File) Reset() { + *x = File{} + if protoimpl.UnsafeEnabled { + mi := &file_file_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *File) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*File) ProtoMessage() {} + +func (x *File) ProtoReflect() protoreflect.Message { + mi := &file_file_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use File.ProtoReflect.Descriptor instead. 
+func (*File) Descriptor() ([]byte, []int) { + return file_file_proto_rawDescGZIP(), []int{0} +} + +func (x *File) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *File) GetParentId() int64 { + if x != nil { + return x.ParentId + } + return 0 +} + +func (x *File) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *File) GetMode() int64 { + if x != nil { + return x.Mode + } + return 0 +} + +func (x *File) GetModTime() int64 { + if x != nil { + return x.ModTime + } + return 0 +} + +func (x *File) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *File) GetHash() []byte { + if x != nil { + return x.Hash + } + return nil +} + +type EditedFile struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ParentId *int64 `protobuf:"varint,2,opt,name=parent_id,json=parentId,proto3,oneof" json:"parent_id,omitempty"` + Name *string `protobuf:"bytes,3,opt,name=name,proto3,oneof" json:"name,omitempty"` +} + +func (x *EditedFile) Reset() { + *x = EditedFile{} + if protoimpl.UnsafeEnabled { + mi := &file_file_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EditedFile) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EditedFile) ProtoMessage() {} + +func (x *EditedFile) ProtoReflect() protoreflect.Message { + mi := &file_file_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EditedFile.ProtoReflect.Descriptor instead. +func (*EditedFile) Descriptor() ([]byte, []int) { + return file_file_proto_rawDescGZIP(), []int{1} +} + +func (x *EditedFile) GetParentId() int64 { + if x != nil && x.ParentId != nil { + return *x.ParentId + } + return 0 +} + +func (x *EditedFile) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +var File_file_proto protoreflect.FileDescriptor + +var file_file_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x66, 0x69, + 0x6c, 0x65, 0x22, 0x9e, 0x01, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, + 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, + 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, + 0x61, 0x73, 0x68, 0x22, 0x5e, 0x0a, 0x0a, 0x45, 0x64, 0x69, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x12, 0x20, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 
0x64, + 0x88, 0x01, 0x01, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x01, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, + 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, + 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_file_proto_rawDescOnce sync.Once + file_file_proto_rawDescData = file_file_proto_rawDesc +) + +func file_file_proto_rawDescGZIP() []byte { + file_file_proto_rawDescOnce.Do(func() { + file_file_proto_rawDescData = protoimpl.X.CompressGZIP(file_file_proto_rawDescData) + }) + return file_file_proto_rawDescData +} + +var file_file_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_file_proto_goTypes = []interface{}{ + (*File)(nil), // 0: file.File + (*EditedFile)(nil), // 1: file.EditedFile +} +var file_file_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_file_proto_init() } +func file_file_proto_init() { + if File_file_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_file_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*File); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_file_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EditedFile); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_file_proto_msgTypes[1].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_file_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_file_proto_goTypes, + DependencyIndexes: file_file_proto_depIdxs, + MessageInfos: file_file_proto_msgTypes, + }.Build() + File_file_proto = out.File + file_file_proto_rawDesc = nil + file_file_proto_goTypes = nil + file_file_proto_depIdxs = nil +} diff --git a/entity/file.proto b/entity/file.proto new file mode 100644 index 0000000..880f255 --- /dev/null +++ b/entity/file.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; +package file; +option go_package = "github.com/abc950309/tapewriter/entity"; + +message File { + int64 id = 1; + int64 parent_id = 2; + string name = 3; + + int64 mode = 17; + int64 mod_time = 18; + int64 size = 19; + bytes hash = 20; +} + +message EditedFile { + optional int64 parent_id = 2; + optional string name = 3; +} diff --git a/entity/job.go b/entity/job.go new file mode 100644 index 0000000..2bc169f --- /dev/null +++ b/entity/job.go @@ -0,0 +1,32 @@ +package entity + +import ( + "database/sql" + "database/sql/driver" +) + +var ( + _ = sql.Scanner(&JobParam{}) + _ = driver.Valuer(&JobParam{}) +) + +func (x *JobParam) Scan(src any) error { + return Scan(x, src) +} + +func (x *JobParam) Value() 
(driver.Value, error) { + return Value(x) +} + +var ( + _ = sql.Scanner(&JobState{}) + _ = driver.Valuer(&JobState{}) +) + +func (x *JobState) Scan(src any) error { + return Scan(x, src) +} + +func (x *JobState) Value() (driver.Value, error) { + return Value(x) +} diff --git a/entity/job.pb.go b/entity/job.pb.go new file mode 100644 index 0000000..3414107 --- /dev/null +++ b/entity/job.pb.go @@ -0,0 +1,782 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: job.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type JobStatus int32 + +const ( + JobStatus_Draft JobStatus = 0 + JobStatus_NotReady JobStatus = 1 // dependencies not satisfied + JobStatus_Pending JobStatus = 2 // waiting in queue + JobStatus_Processing JobStatus = 3 + JobStatus_Completed JobStatus = 4 + JobStatus_Failed JobStatus = 255 +) + +// Enum value maps for JobStatus. +var ( + JobStatus_name = map[int32]string{ + 0: "Draft", + 1: "NotReady", + 2: "Pending", + 3: "Processing", + 4: "Completed", + 255: "Failed", + } + JobStatus_value = map[string]int32{ + "Draft": 0, + "NotReady": 1, + "Pending": 2, + "Processing": 3, + "Completed": 4, + "Failed": 255, + } +) + +func (x JobStatus) Enum() *JobStatus { + p := new(JobStatus) + *p = x + return p +} + +func (x JobStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (JobStatus) Descriptor() protoreflect.EnumDescriptor { + return file_job_proto_enumTypes[0].Descriptor() +} + +func (JobStatus) Type() protoreflect.EnumType { + return &file_job_proto_enumTypes[0] +} + +func (x JobStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use JobStatus.Descriptor instead. 
+func (JobStatus) EnumDescriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{0} +} + +type Job struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Status JobStatus `protobuf:"varint,2,opt,name=status,proto3,enum=job.JobStatus" json:"status,omitempty"` + Priority int64 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"` + CreateTime int64 `protobuf:"varint,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + UpdateTime int64 `protobuf:"varint,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + State *JobState `protobuf:"bytes,17,opt,name=state,proto3" json:"state,omitempty"` +} + +func (x *Job) Reset() { + *x = Job{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Job) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Job) ProtoMessage() {} + +func (x *Job) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Job.ProtoReflect.Descriptor instead. +func (*Job) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{0} +} + +func (x *Job) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Job) GetStatus() JobStatus { + if x != nil { + return x.Status + } + return JobStatus_Draft +} + +func (x *Job) GetPriority() int64 { + if x != nil { + return x.Priority + } + return 0 +} + +func (x *Job) GetCreateTime() int64 { + if x != nil { + return x.CreateTime + } + return 0 +} + +func (x *Job) GetUpdateTime() int64 { + if x != nil { + return x.UpdateTime + } + return 0 +} + +func (x *Job) GetState() *JobState { + if x != nil { + return x.State + } + return nil +} + +type JobParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Param: + // *JobParam_Archive + Param isJobParam_Param `protobuf_oneof:"param"` +} + +func (x *JobParam) Reset() { + *x = JobParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobParam) ProtoMessage() {} + +func (x *JobParam) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobParam.ProtoReflect.Descriptor instead. 
+func (*JobParam) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{1} +} + +func (m *JobParam) GetParam() isJobParam_Param { + if m != nil { + return m.Param + } + return nil +} + +func (x *JobParam) GetArchive() *JobParamArchive { + if x, ok := x.GetParam().(*JobParam_Archive); ok { + return x.Archive + } + return nil +} + +type isJobParam_Param interface { + isJobParam_Param() +} + +type JobParam_Archive struct { + Archive *JobParamArchive `protobuf:"bytes,1,opt,name=Archive,proto3,oneof"` +} + +func (*JobParam_Archive) isJobParam_Param() {} + +type JobState struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to State: + // *JobState_Archive + State isJobState_State `protobuf_oneof:"state"` +} + +func (x *JobState) Reset() { + *x = JobState{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobState) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobState) ProtoMessage() {} + +func (x *JobState) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobState.ProtoReflect.Descriptor instead. +func (*JobState) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{2} +} + +func (m *JobState) GetState() isJobState_State { + if m != nil { + return m.State + } + return nil +} + +func (x *JobState) GetArchive() *JobStateArchive { + if x, ok := x.GetState().(*JobState_Archive); ok { + return x.Archive + } + return nil +} + +type isJobState_State interface { + isJobState_State() +} + +type JobState_Archive struct { + Archive *JobStateArchive `protobuf:"bytes,1,opt,name=Archive,proto3,oneof"` +} + +func (*JobState_Archive) isJobState_State() {} + +type JobNextParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Param: + // *JobNextParam_Archive + Param isJobNextParam_Param `protobuf_oneof:"param"` +} + +func (x *JobNextParam) Reset() { + *x = JobNextParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobNextParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobNextParam) ProtoMessage() {} + +func (x *JobNextParam) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobNextParam.ProtoReflect.Descriptor instead. 
+func (*JobNextParam) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{3} +} + +func (m *JobNextParam) GetParam() isJobNextParam_Param { + if m != nil { + return m.Param + } + return nil +} + +func (x *JobNextParam) GetArchive() *JobArchiveNextParam { + if x, ok := x.GetParam().(*JobNextParam_Archive); ok { + return x.Archive + } + return nil +} + +type isJobNextParam_Param interface { + isJobNextParam_Param() +} + +type JobNextParam_Archive struct { + Archive *JobArchiveNextParam `protobuf:"bytes,1,opt,name=archive,proto3,oneof"` +} + +func (*JobNextParam_Archive) isJobNextParam_Param() {} + +type CreatableJob struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Priority int64 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"` + Param *JobParam `protobuf:"bytes,17,opt,name=param,proto3" json:"param,omitempty"` +} + +func (x *CreatableJob) Reset() { + *x = CreatableJob{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreatableJob) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreatableJob) ProtoMessage() {} + +func (x *CreatableJob) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreatableJob.ProtoReflect.Descriptor instead. +func (*CreatableJob) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{4} +} + +func (x *CreatableJob) GetPriority() int64 { + if x != nil { + return x.Priority + } + return 0 +} + +func (x *CreatableJob) GetParam() *JobParam { + if x != nil { + return x.Param + } + return nil +} + +type JobFilter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Status *JobStatus `protobuf:"varint,1,opt,name=status,proto3,enum=job.JobStatus,oneof" json:"status,omitempty"` + Limit *int64 `protobuf:"varint,33,opt,name=limit,proto3,oneof" json:"limit,omitempty"` + Offset *int64 `protobuf:"varint,34,opt,name=offset,proto3,oneof" json:"offset,omitempty"` +} + +func (x *JobFilter) Reset() { + *x = JobFilter{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobFilter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobFilter) ProtoMessage() {} + +func (x *JobFilter) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobFilter.ProtoReflect.Descriptor instead. 
+func (*JobFilter) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{5} +} + +func (x *JobFilter) GetStatus() JobStatus { + if x != nil && x.Status != nil { + return *x.Status + } + return JobStatus_Draft +} + +func (x *JobFilter) GetLimit() int64 { + if x != nil && x.Limit != nil { + return *x.Limit + } + return 0 +} + +func (x *JobFilter) GetOffset() int64 { + if x != nil && x.Offset != nil { + return *x.Offset + } + return 0 +} + +type JobDisplay struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Display: + // *JobDisplay_Archive + Display isJobDisplay_Display `protobuf_oneof:"display"` +} + +func (x *JobDisplay) Reset() { + *x = JobDisplay{} + if protoimpl.UnsafeEnabled { + mi := &file_job_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobDisplay) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobDisplay) ProtoMessage() {} + +func (x *JobDisplay) ProtoReflect() protoreflect.Message { + mi := &file_job_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobDisplay.ProtoReflect.Descriptor instead. +func (*JobDisplay) Descriptor() ([]byte, []int) { + return file_job_proto_rawDescGZIP(), []int{6} +} + +func (m *JobDisplay) GetDisplay() isJobDisplay_Display { + if m != nil { + return m.Display + } + return nil +} + +func (x *JobDisplay) GetArchive() *JobDisplayArchive { + if x, ok := x.GetDisplay().(*JobDisplay_Archive); ok { + return x.Archive + } + return nil +} + +type isJobDisplay_Display interface { + isJobDisplay_Display() +} + +type JobDisplay_Archive struct { + Archive *JobDisplayArchive `protobuf:"bytes,1,opt,name=archive,proto3,oneof"` +} + +func (*JobDisplay_Archive) isJobDisplay_Display() {} + +var File_job_proto protoreflect.FileDescriptor + +var file_job_proto_rawDesc = []byte{ + 0x0a, 0x09, 0x6a, 0x6f, 0x62, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x6a, 0x6f, 0x62, + 0x1a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x22, 0xc0, 0x01, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6a, 0x6f, + 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, + 0x1f, 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, + 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, + 0x65, 0x12, 0x23, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0d, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, + 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 
0x4d, 0x0a, 0x08, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x12, 0x38, 0x0a, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x48, 0x00, 0x52, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x07, 0x0a, 0x05, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x4d, 0x0a, 0x08, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x12, 0x38, 0x0a, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, + 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, + 0x48, 0x00, 0x52, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x22, 0x55, 0x0a, 0x0c, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x12, 0x3c, 0x0a, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, + 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x4e, 0x65, + 0x78, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, + 0x76, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x4f, 0x0a, 0x0c, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x0a, 0x08, 0x70, + 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x70, + 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x52, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x90, 0x01, 0x0a, + 0x09, 0x4a, 0x6f, 0x62, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6a, 0x6f, 0x62, + 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, + 0x18, 0x21, 0x20, 0x01, 0x28, 0x03, 0x48, 0x01, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x88, + 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x22, 0x20, 0x01, + 0x28, 0x03, 0x48, 0x02, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x88, 0x01, 0x01, 0x42, + 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, + 0x69, 0x6d, 0x69, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x22, + 0x53, 0x0a, 0x0a, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x12, 0x3a, 0x0a, + 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, + 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, + 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x48, 0x00, + 0x52, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x2a, 0x5d, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x72, 0x61, 0x66, 0x74, 0x10, 
0x00, 0x12, 0x0c, 0x0a, 0x08, + 0x4e, 0x6f, 0x74, 0x52, 0x65, 0x61, 0x64, 0x79, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x69, 0x6e, 0x67, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x65, 0x64, 0x10, 0x04, 0x12, 0x0b, 0x0a, 0x06, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, + 0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, + 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_job_proto_rawDescOnce sync.Once + file_job_proto_rawDescData = file_job_proto_rawDesc +) + +func file_job_proto_rawDescGZIP() []byte { + file_job_proto_rawDescOnce.Do(func() { + file_job_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_proto_rawDescData) + }) + return file_job_proto_rawDescData +} + +var file_job_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_job_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_job_proto_goTypes = []interface{}{ + (JobStatus)(0), // 0: job.JobStatus + (*Job)(nil), // 1: job.Job + (*JobParam)(nil), // 2: job.JobParam + (*JobState)(nil), // 3: job.JobState + (*JobNextParam)(nil), // 4: job.JobNextParam + (*CreatableJob)(nil), // 5: job.CreatableJob + (*JobFilter)(nil), // 6: job.JobFilter + (*JobDisplay)(nil), // 7: job.JobDisplay + (*JobParamArchive)(nil), // 8: job_archive.JobParamArchive + (*JobStateArchive)(nil), // 9: job_archive.JobStateArchive + (*JobArchiveNextParam)(nil), // 10: job_archive.JobArchiveNextParam + (*JobDisplayArchive)(nil), // 11: job_archive.JobDisplayArchive +} +var file_job_proto_depIdxs = []int32{ + 0, // 0: job.Job.status:type_name -> job.JobStatus + 3, // 1: job.Job.state:type_name -> job.JobState + 8, // 2: job.JobParam.Archive:type_name -> job_archive.JobParamArchive + 9, // 3: job.JobState.Archive:type_name -> job_archive.JobStateArchive + 10, // 4: job.JobNextParam.archive:type_name -> job_archive.JobArchiveNextParam + 2, // 5: job.CreatableJob.param:type_name -> job.JobParam + 0, // 6: job.JobFilter.status:type_name -> job.JobStatus + 11, // 7: job.JobDisplay.archive:type_name -> job_archive.JobDisplayArchive + 8, // [8:8] is the sub-list for method output_type + 8, // [8:8] is the sub-list for method input_type + 8, // [8:8] is the sub-list for extension type_name + 8, // [8:8] is the sub-list for extension extendee + 0, // [0:8] is the sub-list for field type_name +} + +func init() { file_job_proto_init() } +func file_job_proto_init() { + if File_job_proto != nil { + return + } + file_job_archive_proto_init() + if !protoimpl.UnsafeEnabled { + file_job_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Job); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } 
+ } + file_job_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobNextParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreatableJob); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobFilter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobDisplay); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_job_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*JobParam_Archive)(nil), + } + file_job_proto_msgTypes[2].OneofWrappers = []interface{}{ + (*JobState_Archive)(nil), + } + file_job_proto_msgTypes[3].OneofWrappers = []interface{}{ + (*JobNextParam_Archive)(nil), + } + file_job_proto_msgTypes[5].OneofWrappers = []interface{}{} + file_job_proto_msgTypes[6].OneofWrappers = []interface{}{ + (*JobDisplay_Archive)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_job_proto_rawDesc, + NumEnums: 1, + NumMessages: 7, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_job_proto_goTypes, + DependencyIndexes: file_job_proto_depIdxs, + EnumInfos: file_job_proto_enumTypes, + MessageInfos: file_job_proto_msgTypes, + }.Build() + File_job_proto = out.File + file_job_proto_rawDesc = nil + file_job_proto_goTypes = nil + file_job_proto_depIdxs = nil +} diff --git a/entity/job.proto b/entity/job.proto new file mode 100644 index 0000000..e8ac35f --- /dev/null +++ b/entity/job.proto @@ -0,0 +1,61 @@ +syntax = "proto3"; +package job; +option go_package = "github.com/abc950309/tapewriter/entity"; + +import "job_archive.proto"; + +enum JobStatus { + Draft = 0; + NotReady = 1; // dependencies not satisfied + Pending = 2; // waiting in queue + Processing = 3; + Completed = 4; + + Failed = 255; +} + +message Job { + int64 id = 1; + JobStatus status = 2; + int64 priority = 3; + int64 create_time = 4; + int64 update_time = 5; + + JobState state = 17; +} + +message JobParam { + oneof param { + job_archive.JobParamArchive Archive = 1; + } +} + +message JobState { + oneof state { + job_archive.JobStateArchive Archive = 1; + } +} + +message JobNextParam { + oneof param { + job_archive.JobArchiveNextParam archive = 1; + } +} + +message CreatableJob { + int64 priority = 3; + JobParam param = 17; +} + +message JobFilter { + optional JobStatus status = 1; + + optional int64 limit = 33; + optional int64 offset = 34; +} + +message JobDisplay { + oneof display { + job_archive.JobDisplayArchive archive = 1; + } +} diff --git a/entity/job_archive.pb.go b/entity/job_archive.pb.go new file mode 100644 index 0000000..a884e31 --- /dev/null +++ b/entity/job_archive.pb.go @@ -0,0 +1,708 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: job_archive.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type JobArchiveStep int32 + +const ( + JobArchiveStep_Pending JobArchiveStep = 0 + JobArchiveStep_WaitForTape JobArchiveStep = 1 + JobArchiveStep_Copying JobArchiveStep = 2 + JobArchiveStep_Finished JobArchiveStep = 255 +) + +// Enum value maps for JobArchiveStep. +var ( + JobArchiveStep_name = map[int32]string{ + 0: "Pending", + 1: "WaitForTape", + 2: "Copying", + 255: "Finished", + } + JobArchiveStep_value = map[string]int32{ + "Pending": 0, + "WaitForTape": 1, + "Copying": 2, + "Finished": 255, + } +) + +func (x JobArchiveStep) Enum() *JobArchiveStep { + p := new(JobArchiveStep) + *p = x + return p +} + +func (x JobArchiveStep) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (JobArchiveStep) Descriptor() protoreflect.EnumDescriptor { + return file_job_archive_proto_enumTypes[0].Descriptor() +} + +func (JobArchiveStep) Type() protoreflect.EnumType { + return &file_job_archive_proto_enumTypes[0] +} + +func (x JobArchiveStep) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use JobArchiveStep.Descriptor instead. +func (JobArchiveStep) EnumDescriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{0} +} + +type JobParamArchive struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sources []*Source `protobuf:"bytes,1,rep,name=sources,proto3" json:"sources,omitempty"` +} + +func (x *JobParamArchive) Reset() { + *x = JobParamArchive{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobParamArchive) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobParamArchive) ProtoMessage() {} + +func (x *JobParamArchive) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobParamArchive.ProtoReflect.Descriptor instead. 
+func (*JobParamArchive) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{0} +} + +func (x *JobParamArchive) GetSources() []*Source { + if x != nil { + return x.Sources + } + return nil +} + +type JobArchiveNextParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Param: + // *JobArchiveNextParam_WaitForTape + // *JobArchiveNextParam_Copying + // *JobArchiveNextParam_Finished + Param isJobArchiveNextParam_Param `protobuf_oneof:"param"` +} + +func (x *JobArchiveNextParam) Reset() { + *x = JobArchiveNextParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobArchiveNextParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobArchiveNextParam) ProtoMessage() {} + +func (x *JobArchiveNextParam) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobArchiveNextParam.ProtoReflect.Descriptor instead. +func (*JobArchiveNextParam) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{1} +} + +func (m *JobArchiveNextParam) GetParam() isJobArchiveNextParam_Param { + if m != nil { + return m.Param + } + return nil +} + +func (x *JobArchiveNextParam) GetWaitForTape() *JobArchiveWaitForTapeParam { + if x, ok := x.GetParam().(*JobArchiveNextParam_WaitForTape); ok { + return x.WaitForTape + } + return nil +} + +func (x *JobArchiveNextParam) GetCopying() *JobArchiveCopyingParam { + if x, ok := x.GetParam().(*JobArchiveNextParam_Copying); ok { + return x.Copying + } + return nil +} + +func (x *JobArchiveNextParam) GetFinished() *JobArchiveFinishedParam { + if x, ok := x.GetParam().(*JobArchiveNextParam_Finished); ok { + return x.Finished + } + return nil +} + +type isJobArchiveNextParam_Param interface { + isJobArchiveNextParam_Param() +} + +type JobArchiveNextParam_WaitForTape struct { + WaitForTape *JobArchiveWaitForTapeParam `protobuf:"bytes,1,opt,name=WaitForTape,proto3,oneof"` +} + +type JobArchiveNextParam_Copying struct { + Copying *JobArchiveCopyingParam `protobuf:"bytes,2,opt,name=Copying,proto3,oneof"` +} + +type JobArchiveNextParam_Finished struct { + Finished *JobArchiveFinishedParam `protobuf:"bytes,255,opt,name=Finished,proto3,oneof"` +} + +func (*JobArchiveNextParam_WaitForTape) isJobArchiveNextParam_Param() {} + +func (*JobArchiveNextParam_Copying) isJobArchiveNextParam_Param() {} + +func (*JobArchiveNextParam_Finished) isJobArchiveNextParam_Param() {} + +type JobArchiveWaitForTapeParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *JobArchiveWaitForTapeParam) Reset() { + *x = JobArchiveWaitForTapeParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobArchiveWaitForTapeParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobArchiveWaitForTapeParam) ProtoMessage() {} + +func (x *JobArchiveWaitForTapeParam) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[2] + 
if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobArchiveWaitForTapeParam.ProtoReflect.Descriptor instead. +func (*JobArchiveWaitForTapeParam) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{2} +} + +type JobArchiveCopyingParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Device string `protobuf:"bytes,1,opt,name=device,proto3" json:"device,omitempty"` + Barcode string `protobuf:"bytes,2,opt,name=barcode,proto3" json:"barcode,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *JobArchiveCopyingParam) Reset() { + *x = JobArchiveCopyingParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobArchiveCopyingParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobArchiveCopyingParam) ProtoMessage() {} + +func (x *JobArchiveCopyingParam) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobArchiveCopyingParam.ProtoReflect.Descriptor instead. +func (*JobArchiveCopyingParam) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{3} +} + +func (x *JobArchiveCopyingParam) GetDevice() string { + if x != nil { + return x.Device + } + return "" +} + +func (x *JobArchiveCopyingParam) GetBarcode() string { + if x != nil { + return x.Barcode + } + return "" +} + +func (x *JobArchiveCopyingParam) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type JobArchiveFinishedParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *JobArchiveFinishedParam) Reset() { + *x = JobArchiveFinishedParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobArchiveFinishedParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobArchiveFinishedParam) ProtoMessage() {} + +func (x *JobArchiveFinishedParam) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobArchiveFinishedParam.ProtoReflect.Descriptor instead. 
+func (*JobArchiveFinishedParam) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{4} +} + +type JobStateArchive struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Step JobArchiveStep `protobuf:"varint,1,opt,name=step,proto3,enum=job_archive.JobArchiveStep" json:"step,omitempty"` + Sources []*SourceState `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"` +} + +func (x *JobStateArchive) Reset() { + *x = JobStateArchive{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobStateArchive) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobStateArchive) ProtoMessage() {} + +func (x *JobStateArchive) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobStateArchive.ProtoReflect.Descriptor instead. +func (*JobStateArchive) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{5} +} + +func (x *JobStateArchive) GetStep() JobArchiveStep { + if x != nil { + return x.Step + } + return JobArchiveStep_Pending +} + +func (x *JobStateArchive) GetSources() []*SourceState { + if x != nil { + return x.Sources + } + return nil +} + +type JobDisplayArchive struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CopyedBytes int64 `protobuf:"varint,1,opt,name=copyedBytes,proto3" json:"copyedBytes,omitempty"` + CopyedFiles int64 `protobuf:"varint,2,opt,name=copyedFiles,proto3" json:"copyedFiles,omitempty"` + TotalBytes int64 `protobuf:"varint,3,opt,name=totalBytes,proto3" json:"totalBytes,omitempty"` + TotalFiles int64 `protobuf:"varint,4,opt,name=totalFiles,proto3" json:"totalFiles,omitempty"` + Speed int64 `protobuf:"varint,5,opt,name=speed,proto3" json:"speed,omitempty"` +} + +func (x *JobDisplayArchive) Reset() { + *x = JobDisplayArchive{} + if protoimpl.UnsafeEnabled { + mi := &file_job_archive_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobDisplayArchive) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobDisplayArchive) ProtoMessage() {} + +func (x *JobDisplayArchive) ProtoReflect() protoreflect.Message { + mi := &file_job_archive_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobDisplayArchive.ProtoReflect.Descriptor instead. 
+func (*JobDisplayArchive) Descriptor() ([]byte, []int) { + return file_job_archive_proto_rawDescGZIP(), []int{6} +} + +func (x *JobDisplayArchive) GetCopyedBytes() int64 { + if x != nil { + return x.CopyedBytes + } + return 0 +} + +func (x *JobDisplayArchive) GetCopyedFiles() int64 { + if x != nil { + return x.CopyedFiles + } + return 0 +} + +func (x *JobDisplayArchive) GetTotalBytes() int64 { + if x != nil { + return x.TotalBytes + } + return 0 +} + +func (x *JobDisplayArchive) GetTotalFiles() int64 { + if x != nil { + return x.TotalFiles + } + return 0 +} + +func (x *JobDisplayArchive) GetSpeed() int64 { + if x != nil { + return x.Speed + } + return 0 +} + +var File_job_archive_proto protoreflect.FileDescriptor + +var file_job_archive_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, + 0x1a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x3b, + 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x12, 0x28, 0x0a, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x22, 0xf1, 0x01, 0x0a, 0x13, + 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x4e, 0x65, 0x78, 0x74, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x12, 0x4b, 0x0a, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, + 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, + 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x48, 0x00, 0x52, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, + 0x12, 0x3f, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x23, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, + 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, + 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, + 0x67, 0x12, 0x43, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x18, 0xff, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, + 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6e, + 0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x08, 0x46, 0x69, + 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, + 0x1c, 0x0a, 0x1a, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x57, 0x61, 0x69, + 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x5e, 0x0a, + 0x16, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x69, + 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x65, 0x76, 0x69, 0x63, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x12, + 0x18, 0x0a, 0x07, 0x62, 0x61, 0x72, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x62, 0x61, 0x72, 0x63, 
0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x19, 0x0a, + 0x17, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x71, 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x73, + 0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, + 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, + 0x76, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x2d, 0x0a, 0x07, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x22, 0xad, 0x01, 0x0a, 0x11, + 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, + 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, + 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69, + 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, + 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x70, 0x65, 0x65, 0x64, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x73, 0x70, 0x65, 0x65, 0x64, 0x2a, 0x4a, 0x0a, 0x0e, 0x4a, + 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x53, 0x74, 0x65, 0x70, 0x12, 0x0b, 0x0a, + 0x07, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x57, 0x61, + 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43, + 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, + 0x73, 0x68, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, + 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, + 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_job_archive_proto_rawDescOnce sync.Once + file_job_archive_proto_rawDescData = file_job_archive_proto_rawDesc +) + +func file_job_archive_proto_rawDescGZIP() []byte { + file_job_archive_proto_rawDescOnce.Do(func() { + file_job_archive_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_archive_proto_rawDescData) + }) + return file_job_archive_proto_rawDescData +} + +var file_job_archive_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_job_archive_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_job_archive_proto_goTypes = []interface{}{ + (JobArchiveStep)(0), // 0: job_archive.JobArchiveStep + (*JobParamArchive)(nil), // 1: 
job_archive.JobParamArchive + (*JobArchiveNextParam)(nil), // 2: job_archive.JobArchiveNextParam + (*JobArchiveWaitForTapeParam)(nil), // 3: job_archive.JobArchiveWaitForTapeParam + (*JobArchiveCopyingParam)(nil), // 4: job_archive.JobArchiveCopyingParam + (*JobArchiveFinishedParam)(nil), // 5: job_archive.JobArchiveFinishedParam + (*JobStateArchive)(nil), // 6: job_archive.JobStateArchive + (*JobDisplayArchive)(nil), // 7: job_archive.JobDisplayArchive + (*Source)(nil), // 8: source.Source + (*SourceState)(nil), // 9: source.SourceState +} +var file_job_archive_proto_depIdxs = []int32{ + 8, // 0: job_archive.JobParamArchive.sources:type_name -> source.Source + 3, // 1: job_archive.JobArchiveNextParam.WaitForTape:type_name -> job_archive.JobArchiveWaitForTapeParam + 4, // 2: job_archive.JobArchiveNextParam.Copying:type_name -> job_archive.JobArchiveCopyingParam + 5, // 3: job_archive.JobArchiveNextParam.Finished:type_name -> job_archive.JobArchiveFinishedParam + 0, // 4: job_archive.JobStateArchive.step:type_name -> job_archive.JobArchiveStep + 9, // 5: job_archive.JobStateArchive.sources:type_name -> source.SourceState + 6, // [6:6] is the sub-list for method output_type + 6, // [6:6] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name +} + +func init() { file_job_archive_proto_init() } +func file_job_archive_proto_init() { + if File_job_archive_proto != nil { + return + } + file_source_proto_init() + if !protoimpl.UnsafeEnabled { + file_job_archive_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobParamArchive); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobArchiveNextParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobArchiveWaitForTapeParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobArchiveCopyingParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobArchiveFinishedParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobStateArchive); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_archive_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobDisplayArchive); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_job_archive_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*JobArchiveNextParam_WaitForTape)(nil), + 
(*JobArchiveNextParam_Copying)(nil), + (*JobArchiveNextParam_Finished)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_job_archive_proto_rawDesc, + NumEnums: 1, + NumMessages: 7, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_job_archive_proto_goTypes, + DependencyIndexes: file_job_archive_proto_depIdxs, + EnumInfos: file_job_archive_proto_enumTypes, + MessageInfos: file_job_archive_proto_msgTypes, + }.Build() + File_job_archive_proto = out.File + file_job_archive_proto_rawDesc = nil + file_job_archive_proto_goTypes = nil + file_job_archive_proto_depIdxs = nil +} diff --git a/entity/job_archive.proto b/entity/job_archive.proto new file mode 100644 index 0000000..8622082 --- /dev/null +++ b/entity/job_archive.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; +package job_archive; +option go_package = "github.com/abc950309/tapewriter/entity"; + +import "source.proto"; + +enum JobArchiveStep { + Pending = 0; + WaitForTape = 1; + Copying = 2; + + Finished = 255; +} + +message JobParamArchive { + repeated source.Source sources = 1; +} + +message JobArchiveNextParam { + oneof param { + JobArchiveWaitForTapeParam WaitForTape = 1; + JobArchiveCopyingParam Copying = 2; + JobArchiveFinishedParam Finished = 255; + } +} + +message JobArchiveWaitForTapeParam {} + +message JobArchiveCopyingParam { + string device = 1; + string barcode = 2; + string name = 3; +} + +message JobArchiveFinishedParam {} + +message JobStateArchive { + JobArchiveStep step = 1; + repeated source.SourceState sources = 2; +} + +message JobDisplayArchive { + int64 copyedBytes = 1; + int64 copyedFiles = 2; + int64 totalBytes = 3; + int64 totalFiles = 4; + + optional int64 speed = 5; +} diff --git a/entity/job_restore.pb.go b/entity/job_restore.pb.go new file mode 100644 index 0000000..f7b90d8 --- /dev/null +++ b/entity/job_restore.pb.go @@ -0,0 +1,791 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: job_restore.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type JobRestoreStep int32 + +const ( + JobRestoreStep_Pending JobRestoreStep = 0 + JobRestoreStep_WaitForTape JobRestoreStep = 1 + JobRestoreStep_Copying JobRestoreStep = 2 + JobRestoreStep_Finished JobRestoreStep = 255 +) + +// Enum value maps for JobRestoreStep. 
+var ( + JobRestoreStep_name = map[int32]string{ + 0: "Pending", + 1: "WaitForTape", + 2: "Copying", + 255: "Finished", + } + JobRestoreStep_value = map[string]int32{ + "Pending": 0, + "WaitForTape": 1, + "Copying": 2, + "Finished": 255, + } +) + +func (x JobRestoreStep) Enum() *JobRestoreStep { + p := new(JobRestoreStep) + *p = x + return p +} + +func (x JobRestoreStep) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (JobRestoreStep) Descriptor() protoreflect.EnumDescriptor { + return file_job_restore_proto_enumTypes[0].Descriptor() +} + +func (JobRestoreStep) Type() protoreflect.EnumType { + return &file_job_restore_proto_enumTypes[0] +} + +func (x JobRestoreStep) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use JobRestoreStep.Descriptor instead. +func (JobRestoreStep) EnumDescriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{0} +} + +type JobParamRestore struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FileIds []int64 `protobuf:"varint,1,rep,packed,name=file_ids,json=fileIds,proto3" json:"file_ids,omitempty"` +} + +func (x *JobParamRestore) Reset() { + *x = JobParamRestore{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobParamRestore) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobParamRestore) ProtoMessage() {} + +func (x *JobParamRestore) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobParamRestore.ProtoReflect.Descriptor instead. +func (*JobParamRestore) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{0} +} + +func (x *JobParamRestore) GetFileIds() []int64 { + if x != nil { + return x.FileIds + } + return nil +} + +type JobRestoreNextParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Param: + // *JobRestoreNextParam_WaitForTape + // *JobRestoreNextParam_Copying + // *JobRestoreNextParam_Finished + Param isJobRestoreNextParam_Param `protobuf_oneof:"param"` +} + +func (x *JobRestoreNextParam) Reset() { + *x = JobRestoreNextParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobRestoreNextParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobRestoreNextParam) ProtoMessage() {} + +func (x *JobRestoreNextParam) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobRestoreNextParam.ProtoReflect.Descriptor instead. 
+func (*JobRestoreNextParam) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{1} +} + +func (m *JobRestoreNextParam) GetParam() isJobRestoreNextParam_Param { + if m != nil { + return m.Param + } + return nil +} + +func (x *JobRestoreNextParam) GetWaitForTape() *JobRestoreWaitForTapeParam { + if x, ok := x.GetParam().(*JobRestoreNextParam_WaitForTape); ok { + return x.WaitForTape + } + return nil +} + +func (x *JobRestoreNextParam) GetCopying() *JobRestoreCopyingParam { + if x, ok := x.GetParam().(*JobRestoreNextParam_Copying); ok { + return x.Copying + } + return nil +} + +func (x *JobRestoreNextParam) GetFinished() *JobRestoreFinishedParam { + if x, ok := x.GetParam().(*JobRestoreNextParam_Finished); ok { + return x.Finished + } + return nil +} + +type isJobRestoreNextParam_Param interface { + isJobRestoreNextParam_Param() +} + +type JobRestoreNextParam_WaitForTape struct { + WaitForTape *JobRestoreWaitForTapeParam `protobuf:"bytes,1,opt,name=WaitForTape,proto3,oneof"` +} + +type JobRestoreNextParam_Copying struct { + Copying *JobRestoreCopyingParam `protobuf:"bytes,2,opt,name=Copying,proto3,oneof"` +} + +type JobRestoreNextParam_Finished struct { + Finished *JobRestoreFinishedParam `protobuf:"bytes,255,opt,name=Finished,proto3,oneof"` +} + +func (*JobRestoreNextParam_WaitForTape) isJobRestoreNextParam_Param() {} + +func (*JobRestoreNextParam_Copying) isJobRestoreNextParam_Param() {} + +func (*JobRestoreNextParam_Finished) isJobRestoreNextParam_Param() {} + +type JobRestoreWaitForTapeParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *JobRestoreWaitForTapeParam) Reset() { + *x = JobRestoreWaitForTapeParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobRestoreWaitForTapeParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobRestoreWaitForTapeParam) ProtoMessage() {} + +func (x *JobRestoreWaitForTapeParam) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobRestoreWaitForTapeParam.ProtoReflect.Descriptor instead. 
+func (*JobRestoreWaitForTapeParam) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{2} +} + +type JobRestoreCopyingParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Device string `protobuf:"bytes,1,opt,name=device,proto3" json:"device,omitempty"` +} + +func (x *JobRestoreCopyingParam) Reset() { + *x = JobRestoreCopyingParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobRestoreCopyingParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobRestoreCopyingParam) ProtoMessage() {} + +func (x *JobRestoreCopyingParam) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobRestoreCopyingParam.ProtoReflect.Descriptor instead. +func (*JobRestoreCopyingParam) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{3} +} + +func (x *JobRestoreCopyingParam) GetDevice() string { + if x != nil { + return x.Device + } + return "" +} + +type JobRestoreFinishedParam struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *JobRestoreFinishedParam) Reset() { + *x = JobRestoreFinishedParam{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobRestoreFinishedParam) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobRestoreFinishedParam) ProtoMessage() {} + +func (x *JobRestoreFinishedParam) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobRestoreFinishedParam.ProtoReflect.Descriptor instead. 
+func (*JobRestoreFinishedParam) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{4} +} + +type FileRestoreState struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FileId int64 `protobuf:"varint,1,opt,name=file_id,json=fileId,proto3" json:"file_id,omitempty"` + Status CopyStatus `protobuf:"varint,2,opt,name=status,proto3,enum=copy_status.CopyStatus" json:"status,omitempty"` + TapeId int64 `protobuf:"varint,17,opt,name=tape_id,json=tapeId,proto3" json:"tape_id,omitempty"` + PositionId int64 `protobuf:"varint,18,opt,name=position_id,json=positionId,proto3" json:"position_id,omitempty"` + PathInTape []string `protobuf:"bytes,19,rep,name=path_in_tape,json=pathInTape,proto3" json:"path_in_tape,omitempty"` +} + +func (x *FileRestoreState) Reset() { + *x = FileRestoreState{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileRestoreState) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileRestoreState) ProtoMessage() {} + +func (x *FileRestoreState) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileRestoreState.ProtoReflect.Descriptor instead. +func (*FileRestoreState) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{5} +} + +func (x *FileRestoreState) GetFileId() int64 { + if x != nil { + return x.FileId + } + return 0 +} + +func (x *FileRestoreState) GetStatus() CopyStatus { + if x != nil { + return x.Status + } + return CopyStatus_Draft +} + +func (x *FileRestoreState) GetTapeId() int64 { + if x != nil { + return x.TapeId + } + return 0 +} + +func (x *FileRestoreState) GetPositionId() int64 { + if x != nil { + return x.PositionId + } + return 0 +} + +func (x *FileRestoreState) GetPathInTape() []string { + if x != nil { + return x.PathInTape + } + return nil +} + +type JobStateRestore struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Step JobRestoreStep `protobuf:"varint,1,opt,name=step,proto3,enum=job_restore.JobRestoreStep" json:"step,omitempty"` + Files []*FileRestoreState `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"` +} + +func (x *JobStateRestore) Reset() { + *x = JobStateRestore{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobStateRestore) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobStateRestore) ProtoMessage() {} + +func (x *JobStateRestore) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobStateRestore.ProtoReflect.Descriptor instead. 
+func (*JobStateRestore) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{6} +} + +func (x *JobStateRestore) GetStep() JobRestoreStep { + if x != nil { + return x.Step + } + return JobRestoreStep_Pending +} + +func (x *JobStateRestore) GetFiles() []*FileRestoreState { + if x != nil { + return x.Files + } + return nil +} + +type JobDisplayRestore struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CopyedBytes int64 `protobuf:"varint,1,opt,name=copyedBytes,proto3" json:"copyedBytes,omitempty"` + CopyedFiles int64 `protobuf:"varint,2,opt,name=copyedFiles,proto3" json:"copyedFiles,omitempty"` + TotalBytes int64 `protobuf:"varint,3,opt,name=totalBytes,proto3" json:"totalBytes,omitempty"` + TotalFiles int64 `protobuf:"varint,4,opt,name=totalFiles,proto3" json:"totalFiles,omitempty"` + Logs []byte `protobuf:"bytes,17,opt,name=logs,proto3" json:"logs,omitempty"` +} + +func (x *JobDisplayRestore) Reset() { + *x = JobDisplayRestore{} + if protoimpl.UnsafeEnabled { + mi := &file_job_restore_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobDisplayRestore) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobDisplayRestore) ProtoMessage() {} + +func (x *JobDisplayRestore) ProtoReflect() protoreflect.Message { + mi := &file_job_restore_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobDisplayRestore.ProtoReflect.Descriptor instead. +func (*JobDisplayRestore) Descriptor() ([]byte, []int) { + return file_job_restore_proto_rawDescGZIP(), []int{7} +} + +func (x *JobDisplayRestore) GetCopyedBytes() int64 { + if x != nil { + return x.CopyedBytes + } + return 0 +} + +func (x *JobDisplayRestore) GetCopyedFiles() int64 { + if x != nil { + return x.CopyedFiles + } + return 0 +} + +func (x *JobDisplayRestore) GetTotalBytes() int64 { + if x != nil { + return x.TotalBytes + } + return 0 +} + +func (x *JobDisplayRestore) GetTotalFiles() int64 { + if x != nil { + return x.TotalFiles + } + return 0 +} + +func (x *JobDisplayRestore) GetLogs() []byte { + if x != nil { + return x.Logs + } + return nil +} + +var File_job_restore_proto protoreflect.FileDescriptor + +var file_job_restore_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x1a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x22, 0x2c, 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x52, + 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, + 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, + 0x73, 0x22, 0xf1, 0x01, 0x0a, 0x13, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x4e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x4b, 0x0a, 0x0b, 0x57, 0x61, 0x69, + 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, + 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, + 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x57, 0x61, 0x69, 
0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, + 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, + 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x12, 0x3f, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, + 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07, + 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x12, 0x43, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x18, 0xff, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6a, 0x6f, 0x62, + 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x48, 0x00, 0x52, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x07, 0x0a, 0x05, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x1c, 0x0a, 0x1a, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x22, 0x30, 0x0a, 0x16, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x16, 0x0a, + 0x06, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, + 0x65, 0x76, 0x69, 0x63, 0x65, 0x22, 0x19, 0x0a, 0x17, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x22, 0xb8, 0x01, 0x0a, 0x10, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x2f, + 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, + 0x2e, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x43, 0x6f, 0x70, + 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, + 0x17, 0x0a, 0x07, 0x74, 0x61, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x06, 0x74, 0x61, 0x70, 0x65, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x20, 0x0a, 0x0c, 0x70, 0x61, 0x74, + 0x68, 0x5f, 0x69, 0x6e, 0x5f, 0x74, 0x61, 0x70, 0x65, 0x18, 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0a, 0x70, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x54, 0x61, 0x70, 0x65, 0x22, 0x77, 0x0a, 0x0f, 0x4a, + 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x2f, + 0x0a, 0x04, 0x73, 0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x6a, + 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, + 0x33, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 
0x05, 0x66, + 0x69, 0x6c, 0x65, 0x73, 0x22, 0xab, 0x01, 0x0a, 0x11, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, + 0x6c, 0x61, 0x79, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, + 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, + 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x1e, + 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, 0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x1e, + 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x12, + 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6c, 0x6f, + 0x67, 0x73, 0x2a, 0x4a, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x53, 0x74, 0x65, 0x70, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10, + 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, + 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12, + 0x0d, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28, + 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, + 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_job_restore_proto_rawDescOnce sync.Once + file_job_restore_proto_rawDescData = file_job_restore_proto_rawDesc +) + +func file_job_restore_proto_rawDescGZIP() []byte { + file_job_restore_proto_rawDescOnce.Do(func() { + file_job_restore_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_restore_proto_rawDescData) + }) + return file_job_restore_proto_rawDescData +} + +var file_job_restore_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_job_restore_proto_msgTypes = make([]protoimpl.MessageInfo, 8) +var file_job_restore_proto_goTypes = []interface{}{ + (JobRestoreStep)(0), // 0: job_restore.JobRestoreStep + (*JobParamRestore)(nil), // 1: job_restore.JobParamRestore + (*JobRestoreNextParam)(nil), // 2: job_restore.JobRestoreNextParam + (*JobRestoreWaitForTapeParam)(nil), // 3: job_restore.JobRestoreWaitForTapeParam + (*JobRestoreCopyingParam)(nil), // 4: job_restore.JobRestoreCopyingParam + (*JobRestoreFinishedParam)(nil), // 5: job_restore.JobRestoreFinishedParam + (*FileRestoreState)(nil), // 6: job_restore.FileRestoreState + (*JobStateRestore)(nil), // 7: job_restore.JobStateRestore + (*JobDisplayRestore)(nil), // 8: job_restore.JobDisplayRestore + (CopyStatus)(0), // 9: copy_status.CopyStatus +} +var file_job_restore_proto_depIdxs = []int32{ + 3, // 0: job_restore.JobRestoreNextParam.WaitForTape:type_name -> job_restore.JobRestoreWaitForTapeParam + 4, // 1: job_restore.JobRestoreNextParam.Copying:type_name -> job_restore.JobRestoreCopyingParam + 5, // 2: job_restore.JobRestoreNextParam.Finished:type_name -> job_restore.JobRestoreFinishedParam + 9, // 3: job_restore.FileRestoreState.status:type_name -> 
copy_status.CopyStatus + 0, // 4: job_restore.JobStateRestore.step:type_name -> job_restore.JobRestoreStep + 6, // 5: job_restore.JobStateRestore.files:type_name -> job_restore.FileRestoreState + 6, // [6:6] is the sub-list for method output_type + 6, // [6:6] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name +} + +func init() { file_job_restore_proto_init() } +func file_job_restore_proto_init() { + if File_job_restore_proto != nil { + return + } + file_copy_status_proto_init() + if !protoimpl.UnsafeEnabled { + file_job_restore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobParamRestore); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobRestoreNextParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobRestoreWaitForTapeParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobRestoreCopyingParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobRestoreFinishedParam); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileRestoreState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobStateRestore); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_job_restore_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobDisplayRestore); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_job_restore_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*JobRestoreNextParam_WaitForTape)(nil), + (*JobRestoreNextParam_Copying)(nil), + (*JobRestoreNextParam_Finished)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_job_restore_proto_rawDesc, + NumEnums: 1, + NumMessages: 8, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_job_restore_proto_goTypes, + DependencyIndexes: file_job_restore_proto_depIdxs, + EnumInfos: file_job_restore_proto_enumTypes, + MessageInfos: file_job_restore_proto_msgTypes, + }.Build() + File_job_restore_proto = out.File + file_job_restore_proto_rawDesc = nil + file_job_restore_proto_goTypes = nil + file_job_restore_proto_depIdxs = 
nil +} diff --git a/entity/job_restore.proto b/entity/job_restore.proto new file mode 100644 index 0000000..eab27ad --- /dev/null +++ b/entity/job_restore.proto @@ -0,0 +1,56 @@ +syntax = "proto3"; +package job_restore; +option go_package = "github.com/abc950309/tapewriter/entity"; + +import "copy_status.proto"; + +enum JobRestoreStep { + Pending = 0; + WaitForTape = 1; + Copying = 2; + + Finished = 255; +} + +message JobParamRestore { + repeated int64 file_ids = 1; +} + +message JobRestoreNextParam { + oneof param { + JobRestoreWaitForTapeParam WaitForTape = 1; + JobRestoreCopyingParam Copying = 2; + JobRestoreFinishedParam Finished = 255; + } +} + +message JobRestoreWaitForTapeParam {} + +message JobRestoreCopyingParam { + string device = 1; +} + +message JobRestoreFinishedParam {} + +message FileRestoreState { + int64 file_id = 1; + copy_status.CopyStatus status = 2; + + int64 tape_id = 17; + int64 position_id = 18; + repeated string path_in_tape = 19; +} + +message JobStateRestore { + JobRestoreStep step = 1; + repeated FileRestoreState files = 2; +} + +message JobDisplayRestore { + int64 copyedBytes = 1; + int64 copyedFiles = 2; + int64 totalBytes = 3; + int64 totalFiles = 4; + + bytes logs = 17; +} diff --git a/entity/position.pb.go b/entity/position.pb.go new file mode 100644 index 0000000..d5cfa46 --- /dev/null +++ b/entity/position.pb.go @@ -0,0 +1,219 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: position.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Position struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + FileId int64 `protobuf:"varint,2,opt,name=file_id,json=fileId,proto3" json:"file_id,omitempty"` + TapeId int64 `protobuf:"varint,3,opt,name=tape_id,json=tapeId,proto3" json:"tape_id,omitempty"` + Path string `protobuf:"bytes,4,opt,name=path,proto3" json:"path,omitempty"` + Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"` + ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"` + WriteTime int64 `protobuf:"varint,19,opt,name=write_time,json=writeTime,proto3" json:"write_time,omitempty"` + Size int64 `protobuf:"varint,20,opt,name=size,proto3" json:"size,omitempty"` + Hash []byte `protobuf:"bytes,21,opt,name=hash,proto3" json:"hash,omitempty"` +} + +func (x *Position) Reset() { + *x = Position{} + if protoimpl.UnsafeEnabled { + mi := &file_position_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Position) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Position) ProtoMessage() {} + +func (x *Position) ProtoReflect() protoreflect.Message { + mi := &file_position_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Position.ProtoReflect.Descriptor instead. +func (*Position) Descriptor() ([]byte, []int) { + return file_position_proto_rawDescGZIP(), []int{0} +} + +func (x *Position) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Position) GetFileId() int64 { + if x != nil { + return x.FileId + } + return 0 +} + +func (x *Position) GetTapeId() int64 { + if x != nil { + return x.TapeId + } + return 0 +} + +func (x *Position) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *Position) GetMode() int64 { + if x != nil { + return x.Mode + } + return 0 +} + +func (x *Position) GetModTime() int64 { + if x != nil { + return x.ModTime + } + return 0 +} + +func (x *Position) GetWriteTime() int64 { + if x != nil { + return x.WriteTime + } + return 0 +} + +func (x *Position) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *Position) GetHash() []byte { + if x != nil { + return x.Hash + } + return nil +} + +var File_position_proto protoreflect.FileDescriptor + +var file_position_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x08, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd6, 0x01, 0x0a, 0x08, 0x50, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, + 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x06, 0x74, 0x61, 0x70, 0x65, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, + 0x04, 0x6d, 0x6f, 0x64, 0x65, 
0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x6d, 0x6f, 0x64, + 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, + 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x09, 0x77, 0x72, 0x69, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73, + 0x69, 0x7a, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, + 0x61, 0x73, 0x68, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, + 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_position_proto_rawDescOnce sync.Once + file_position_proto_rawDescData = file_position_proto_rawDesc +) + +func file_position_proto_rawDescGZIP() []byte { + file_position_proto_rawDescOnce.Do(func() { + file_position_proto_rawDescData = protoimpl.X.CompressGZIP(file_position_proto_rawDescData) + }) + return file_position_proto_rawDescData +} + +var file_position_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_position_proto_goTypes = []interface{}{ + (*Position)(nil), // 0: position.Position +} +var file_position_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_position_proto_init() } +func file_position_proto_init() { + if File_position_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_position_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Position); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_position_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_position_proto_goTypes, + DependencyIndexes: file_position_proto_depIdxs, + MessageInfos: file_position_proto_msgTypes, + }.Build() + File_position_proto = out.File + file_position_proto_rawDesc = nil + file_position_proto_goTypes = nil + file_position_proto_depIdxs = nil +} diff --git a/entity/position.proto b/entity/position.proto new file mode 100644 index 0000000..d7bff38 --- /dev/null +++ b/entity/position.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; +package position; +option go_package = "github.com/abc950309/tapewriter/entity"; + +message Position { + int64 id = 1; + int64 file_id = 2; + int64 tape_id = 3; + string path = 4; + + int64 mode = 17; + int64 mod_time = 18; + int64 write_time = 19; + int64 size = 20; + bytes hash = 21; +} diff --git a/entity/service.pb.go b/entity/service.pb.go new file mode 100644 index 0000000..8ff6e5d --- /dev/null +++ b/entity/service.pb.go @@ -0,0 +1,2004 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: service.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type FileGetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *FileGetRequest) Reset() { + *x = FileGetRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileGetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileGetRequest) ProtoMessage() {} + +func (x *FileGetRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileGetRequest.ProtoReflect.Descriptor instead. +func (*FileGetRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{0} +} + +func (x *FileGetRequest) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +type FileGetReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + File *File `protobuf:"bytes,1,opt,name=file,proto3,oneof" json:"file,omitempty"` + Positions []*Position `protobuf:"bytes,2,rep,name=positions,proto3" json:"positions,omitempty"` + Children []*File `protobuf:"bytes,17,rep,name=children,proto3" json:"children,omitempty"` +} + +func (x *FileGetReply) Reset() { + *x = FileGetReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileGetReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileGetReply) ProtoMessage() {} + +func (x *FileGetReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileGetReply.ProtoReflect.Descriptor instead. 
+func (*FileGetReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{1} +} + +func (x *FileGetReply) GetFile() *File { + if x != nil { + return x.File + } + return nil +} + +func (x *FileGetReply) GetPositions() []*Position { + if x != nil { + return x.Positions + } + return nil +} + +func (x *FileGetReply) GetChildren() []*File { + if x != nil { + return x.Children + } + return nil +} + +type FileEditRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + File *EditedFile `protobuf:"bytes,2,opt,name=file,proto3" json:"file,omitempty"` +} + +func (x *FileEditRequest) Reset() { + *x = FileEditRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileEditRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileEditRequest) ProtoMessage() {} + +func (x *FileEditRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileEditRequest.ProtoReflect.Descriptor instead. +func (*FileEditRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{2} +} + +func (x *FileEditRequest) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *FileEditRequest) GetFile() *EditedFile { + if x != nil { + return x.File + } + return nil +} + +type FileEditReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + File *File `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` +} + +func (x *FileEditReply) Reset() { + *x = FileEditReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileEditReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileEditReply) ProtoMessage() {} + +func (x *FileEditReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileEditReply.ProtoReflect.Descriptor instead. 
+func (*FileEditReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{3} +} + +func (x *FileEditReply) GetFile() *File { + if x != nil { + return x.File + } + return nil +} + +type FileMkdirRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ParentId int64 `protobuf:"varint,1,opt,name=parent_id,json=parentId,proto3" json:"parent_id,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *FileMkdirRequest) Reset() { + *x = FileMkdirRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileMkdirRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileMkdirRequest) ProtoMessage() {} + +func (x *FileMkdirRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileMkdirRequest.ProtoReflect.Descriptor instead. +func (*FileMkdirRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{4} +} + +func (x *FileMkdirRequest) GetParentId() int64 { + if x != nil { + return x.ParentId + } + return 0 +} + +func (x *FileMkdirRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type FileMkdirReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + File *File `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` +} + +func (x *FileMkdirReply) Reset() { + *x = FileMkdirReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileMkdirReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileMkdirReply) ProtoMessage() {} + +func (x *FileMkdirReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileMkdirReply.ProtoReflect.Descriptor instead. 
+func (*FileMkdirReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{5} +} + +func (x *FileMkdirReply) GetFile() *File { + if x != nil { + return x.File + } + return nil +} + +type FileDeleteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ids []int64 `protobuf:"varint,1,rep,packed,name=ids,proto3" json:"ids,omitempty"` +} + +func (x *FileDeleteRequest) Reset() { + *x = FileDeleteRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileDeleteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileDeleteRequest) ProtoMessage() {} + +func (x *FileDeleteRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileDeleteRequest.ProtoReflect.Descriptor instead. +func (*FileDeleteRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{6} +} + +func (x *FileDeleteRequest) GetIds() []int64 { + if x != nil { + return x.Ids + } + return nil +} + +type FileDeleteReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *FileDeleteReply) Reset() { + *x = FileDeleteReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileDeleteReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileDeleteReply) ProtoMessage() {} + +func (x *FileDeleteReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileDeleteReply.ProtoReflect.Descriptor instead. +func (*FileDeleteReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{7} +} + +type FileListParentsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *FileListParentsRequest) Reset() { + *x = FileListParentsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileListParentsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileListParentsRequest) ProtoMessage() {} + +func (x *FileListParentsRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileListParentsRequest.ProtoReflect.Descriptor instead. 
+func (*FileListParentsRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{8} +} + +func (x *FileListParentsRequest) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +type FileListParentsReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Parents []*File `protobuf:"bytes,1,rep,name=parents,proto3" json:"parents,omitempty"` +} + +func (x *FileListParentsReply) Reset() { + *x = FileListParentsReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileListParentsReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileListParentsReply) ProtoMessage() {} + +func (x *FileListParentsReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileListParentsReply.ProtoReflect.Descriptor instead. +func (*FileListParentsReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{9} +} + +func (x *FileListParentsReply) GetParents() []*File { + if x != nil { + return x.Parents + } + return nil +} + +type TapeMGetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ids []int64 `protobuf:"varint,1,rep,packed,name=ids,proto3" json:"ids,omitempty"` +} + +func (x *TapeMGetRequest) Reset() { + *x = TapeMGetRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TapeMGetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TapeMGetRequest) ProtoMessage() {} + +func (x *TapeMGetRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TapeMGetRequest.ProtoReflect.Descriptor instead. 
+func (*TapeMGetRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{10} +} + +func (x *TapeMGetRequest) GetIds() []int64 { + if x != nil { + return x.Ids + } + return nil +} + +type TapeMGetReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tapes []*Tape `protobuf:"bytes,1,rep,name=tapes,proto3" json:"tapes,omitempty"` +} + +func (x *TapeMGetReply) Reset() { + *x = TapeMGetReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TapeMGetReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TapeMGetReply) ProtoMessage() {} + +func (x *TapeMGetReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TapeMGetReply.ProtoReflect.Descriptor instead. +func (*TapeMGetReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{11} +} + +func (x *TapeMGetReply) GetTapes() []*Tape { + if x != nil { + return x.Tapes + } + return nil +} + +type JobListRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Param: + // *JobListRequest_Mget + // *JobListRequest_List + Param isJobListRequest_Param `protobuf_oneof:"param"` +} + +func (x *JobListRequest) Reset() { + *x = JobListRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobListRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobListRequest) ProtoMessage() {} + +func (x *JobListRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobListRequest.ProtoReflect.Descriptor instead. 
+func (*JobListRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{12} +} + +func (m *JobListRequest) GetParam() isJobListRequest_Param { + if m != nil { + return m.Param + } + return nil +} + +func (x *JobListRequest) GetMget() *JobMGetRequest { + if x, ok := x.GetParam().(*JobListRequest_Mget); ok { + return x.Mget + } + return nil +} + +func (x *JobListRequest) GetList() *JobFilter { + if x, ok := x.GetParam().(*JobListRequest_List); ok { + return x.List + } + return nil +} + +type isJobListRequest_Param interface { + isJobListRequest_Param() +} + +type JobListRequest_Mget struct { + Mget *JobMGetRequest `protobuf:"bytes,1,opt,name=mget,proto3,oneof"` +} + +type JobListRequest_List struct { + List *JobFilter `protobuf:"bytes,2,opt,name=list,proto3,oneof"` +} + +func (*JobListRequest_Mget) isJobListRequest_Param() {} + +func (*JobListRequest_List) isJobListRequest_Param() {} + +type JobMGetRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ids []int64 `protobuf:"varint,1,rep,packed,name=ids,proto3" json:"ids,omitempty"` +} + +func (x *JobMGetRequest) Reset() { + *x = JobMGetRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobMGetRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobMGetRequest) ProtoMessage() {} + +func (x *JobMGetRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobMGetRequest.ProtoReflect.Descriptor instead. +func (*JobMGetRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{13} +} + +func (x *JobMGetRequest) GetIds() []int64 { + if x != nil { + return x.Ids + } + return nil +} + +type JobListReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` +} + +func (x *JobListReply) Reset() { + *x = JobListReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobListReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobListReply) ProtoMessage() {} + +func (x *JobListReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobListReply.ProtoReflect.Descriptor instead. 
+func (*JobListReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{14} +} + +func (x *JobListReply) GetJobs() []*Job { + if x != nil { + return x.Jobs + } + return nil +} + +type JobCreateRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Job *CreatableJob `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` +} + +func (x *JobCreateRequest) Reset() { + *x = JobCreateRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobCreateRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobCreateRequest) ProtoMessage() {} + +func (x *JobCreateRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobCreateRequest.ProtoReflect.Descriptor instead. +func (*JobCreateRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{15} +} + +func (x *JobCreateRequest) GetJob() *CreatableJob { + if x != nil { + return x.Job + } + return nil +} + +type JobCreateReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` +} + +func (x *JobCreateReply) Reset() { + *x = JobCreateReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobCreateReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobCreateReply) ProtoMessage() {} + +func (x *JobCreateReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobCreateReply.ProtoReflect.Descriptor instead. 
+func (*JobCreateReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{16} +} + +func (x *JobCreateReply) GetJob() *Job { + if x != nil { + return x.Job + } + return nil +} + +type JobNextRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Param *JobNextParam `protobuf:"bytes,2,opt,name=param,proto3" json:"param,omitempty"` +} + +func (x *JobNextRequest) Reset() { + *x = JobNextRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobNextRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobNextRequest) ProtoMessage() {} + +func (x *JobNextRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobNextRequest.ProtoReflect.Descriptor instead. +func (*JobNextRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{17} +} + +func (x *JobNextRequest) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *JobNextRequest) GetParam() *JobNextParam { + if x != nil { + return x.Param + } + return nil +} + +type JobNextReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` +} + +func (x *JobNextReply) Reset() { + *x = JobNextReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobNextReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobNextReply) ProtoMessage() {} + +func (x *JobNextReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobNextReply.ProtoReflect.Descriptor instead. 
+func (*JobNextReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{18} +} + +func (x *JobNextReply) GetJob() *Job { + if x != nil { + return x.Job + } + return nil +} + +type JobDisplayRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *JobDisplayRequest) Reset() { + *x = JobDisplayRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobDisplayRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobDisplayRequest) ProtoMessage() {} + +func (x *JobDisplayRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobDisplayRequest.ProtoReflect.Descriptor instead. +func (*JobDisplayRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{19} +} + +func (x *JobDisplayRequest) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +type JobDisplayReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Display *JobDisplay `protobuf:"bytes,1,opt,name=display,proto3,oneof" json:"display,omitempty"` +} + +func (x *JobDisplayReply) Reset() { + *x = JobDisplayReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobDisplayReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobDisplayReply) ProtoMessage() {} + +func (x *JobDisplayReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobDisplayReply.ProtoReflect.Descriptor instead. 
+func (*JobDisplayReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{20} +} + +func (x *JobDisplayReply) GetDisplay() *JobDisplay { + if x != nil { + return x.Display + } + return nil +} + +type JobGetLogRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + JobId int64 `protobuf:"varint,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + Offset int64 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` +} + +func (x *JobGetLogRequest) Reset() { + *x = JobGetLogRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobGetLogRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobGetLogRequest) ProtoMessage() {} + +func (x *JobGetLogRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobGetLogRequest.ProtoReflect.Descriptor instead. +func (*JobGetLogRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{21} +} + +func (x *JobGetLogRequest) GetJobId() int64 { + if x != nil { + return x.JobId + } + return 0 +} + +func (x *JobGetLogRequest) GetOffset() int64 { + if x != nil { + return x.Offset + } + return 0 +} + +type JobGetLogReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Logs []byte `protobuf:"bytes,1,opt,name=logs,proto3" json:"logs,omitempty"` +} + +func (x *JobGetLogReply) Reset() { + *x = JobGetLogReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobGetLogReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobGetLogReply) ProtoMessage() {} + +func (x *JobGetLogReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobGetLogReply.ProtoReflect.Descriptor instead. 
+func (*JobGetLogReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{22} +} + +func (x *JobGetLogReply) GetLogs() []byte { + if x != nil { + return x.Logs + } + return nil +} + +type SourceListRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *SourceListRequest) Reset() { + *x = SourceListRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceListRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceListRequest) ProtoMessage() {} + +func (x *SourceListRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceListRequest.ProtoReflect.Descriptor instead. +func (*SourceListRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{23} +} + +func (x *SourceListRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type SourceListReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + File *SourceFile `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` + Chain []*SourceFile `protobuf:"bytes,2,rep,name=chain,proto3" json:"chain,omitempty"` + Children []*SourceFile `protobuf:"bytes,17,rep,name=children,proto3" json:"children,omitempty"` +} + +func (x *SourceListReply) Reset() { + *x = SourceListReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceListReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceListReply) ProtoMessage() {} + +func (x *SourceListReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceListReply.ProtoReflect.Descriptor instead. 
+func (*SourceListReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{24} +} + +func (x *SourceListReply) GetFile() *SourceFile { + if x != nil { + return x.File + } + return nil +} + +func (x *SourceListReply) GetChain() []*SourceFile { + if x != nil { + return x.Chain + } + return nil +} + +func (x *SourceListReply) GetChildren() []*SourceFile { + if x != nil { + return x.Children + } + return nil +} + +type DeviceListRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeviceListRequest) Reset() { + *x = DeviceListRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeviceListRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeviceListRequest) ProtoMessage() {} + +func (x *DeviceListRequest) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeviceListRequest.ProtoReflect.Descriptor instead. +func (*DeviceListRequest) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{25} +} + +type DeviceListReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Devices []string `protobuf:"bytes,1,rep,name=devices,proto3" json:"devices,omitempty"` +} + +func (x *DeviceListReply) Reset() { + *x = DeviceListReply{} + if protoimpl.UnsafeEnabled { + mi := &file_service_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeviceListReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeviceListReply) ProtoMessage() {} + +func (x *DeviceListReply) ProtoReflect() protoreflect.Message { + mi := &file_service_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeviceListReply.ProtoReflect.Descriptor instead. 
+func (*DeviceListReply) Descriptor() ([]byte, []int) { + return file_service_proto_rawDescGZIP(), []int{26} +} + +func (x *DeviceListReply) GetDevices() []string { + if x != nil { + return x.Devices + } + return nil +} + +var File_service_proto protoreflect.FileDescriptor + +var file_service_proto_rawDesc = []byte{ + 0x0a, 0x0d, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, + 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x1a, 0x09, 0x6a, 0x6f, 0x62, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x1a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x0a, 0x74, 0x61, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0c, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x20, 0x0a, 0x0e, 0x46, 0x69, 0x6c, + 0x65, 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x22, 0x96, 0x01, 0x0a, 0x0c, + 0x46, 0x69, 0x6c, 0x65, 0x47, 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x23, 0x0a, 0x04, + 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x66, 0x69, 0x6c, + 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x00, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x88, 0x01, + 0x01, 0x12, 0x30, 0x0a, 0x09, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x26, 0x0a, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, + 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x5f, + 0x66, 0x69, 0x6c, 0x65, 0x22, 0x47, 0x0a, 0x0f, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x64, 0x69, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x24, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x45, 0x64, 0x69, + 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x2f, 0x0a, + 0x0d, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x64, 0x69, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x1e, + 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x66, + 0x69, 0x6c, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x43, + 0x0a, 0x10, 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x6b, 0x64, 0x69, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, + 0x61, 0x74, 0x68, 0x22, 0x30, 0x0a, 0x0e, 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x6b, 0x64, 0x69, 0x72, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x1e, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, + 0x04, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x25, 0x0a, 0x11, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x6c, + 
0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x03, 0x69, 0x64, 0x73, 0x22, 0x11, 0x0a, 0x0f, + 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x28, 0x0a, 0x16, 0x46, 0x69, 0x6c, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x22, 0x3c, 0x0a, 0x14, 0x46, 0x69, 0x6c, + 0x65, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x12, 0x24, 0x0a, 0x07, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x07, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x23, 0x0a, 0x0f, 0x54, 0x61, 0x70, 0x65, 0x4d, + 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x03, 0x69, 0x64, 0x73, 0x22, 0x31, 0x0a, 0x0d, + 0x54, 0x61, 0x70, 0x65, 0x4d, 0x47, 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x20, 0x0a, + 0x05, 0x74, 0x61, 0x70, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x74, + 0x61, 0x70, 0x65, 0x2e, 0x54, 0x61, 0x70, 0x65, 0x52, 0x05, 0x74, 0x61, 0x70, 0x65, 0x73, 0x22, + 0x6e, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x2d, 0x0a, 0x04, 0x6d, 0x67, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x4d, 0x47, 0x65, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x6d, 0x67, 0x65, 0x74, + 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, + 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x48, 0x00, + 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, + 0x22, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x4d, 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x03, + 0x69, 0x64, 0x73, 0x22, 0x2c, 0x0a, 0x0c, 0x4a, 0x6f, 0x62, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x12, 0x1c, 0x0a, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x08, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x04, 0x6a, 0x6f, 0x62, + 0x73, 0x22, 0x37, 0x0a, 0x10, 0x4a, 0x6f, 0x62, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x61, 0x62, + 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x2c, 0x0a, 0x0e, 0x4a, 0x6f, + 0x62, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x1a, 0x0a, 0x03, + 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, + 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x49, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x4e, + 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, + 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x52, 0x05, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x22, 0x2a, 0x0a, 0x0c, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x12, 0x1a, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x08, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, + 0x23, 0x0a, 0x11, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x02, 0x69, 0x64, 0x22, 0x4d, 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x2e, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, + 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x48, 0x00, 0x52, 0x07, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x64, 0x69, 0x73, 0x70, + 0x6c, 0x61, 0x79, 0x22, 0x41, 0x0a, 0x10, 0x4a, 0x6f, 0x62, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x16, + 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, + 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x22, 0x24, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x47, 0x65, 0x74, + 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x22, 0x27, 0x0a, 0x11, + 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x93, 0x01, 0x0a, 0x0f, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x26, 0x0a, 0x04, 0x66, 0x69, 0x6c, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x04, 0x66, 0x69, 0x6c, + 0x65, 0x12, 0x28, 0x0a, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x46, 0x69, 0x6c, 0x65, 0x52, 0x05, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x12, 0x2e, 0x0a, 0x08, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, + 0x65, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x13, 0x0a, 0x11, 0x44, + 0x65, 0x76, 0x69, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x22, 0x2b, 0x0a, 0x0f, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x73, 0x32, 0xf6, 0x06, + 0x0a, 0x07, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x07, 0x46, 0x69, 0x6c, + 0x65, 0x47, 0x65, 0x74, 0x12, 0x17, 0x2e, 0x73, 
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, + 0x69, 0x6c, 0x65, 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x47, 0x65, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x08, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x64, + 0x69, 0x74, 0x12, 0x18, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x45, 0x64, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x64, 0x69, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x09, 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x6b, + 0x64, 0x69, 0x72, 0x12, 0x19, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x4d, 0x6b, 0x64, 0x69, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, + 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x4d, 0x6b, 0x64, + 0x69, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0a, 0x46, 0x69, 0x6c, + 0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x1a, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, + 0x53, 0x0a, 0x0f, 0x46, 0x69, 0x6c, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x73, 0x12, 0x1f, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3e, 0x0a, 0x08, 0x54, 0x61, 0x70, 0x65, 0x4d, 0x47, 0x65, 0x74, + 0x12, 0x18, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x54, 0x61, 0x70, 0x65, 0x4d, + 0x47, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x54, 0x61, 0x70, 0x65, 0x4d, 0x47, 0x65, 0x74, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x07, 0x4a, 0x6f, 0x62, 0x4c, 0x69, 0x73, 0x74, 0x12, + 0x17, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x4c, 0x69, 0x73, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, + 0x00, 0x12, 0x41, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x19, + 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x22, 0x00, 0x12, 0x3b, 0x0a, 0x07, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x12, + 0x17, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 
0x70, 0x6c, 0x79, 0x22, + 0x00, 0x12, 0x44, 0x0a, 0x0a, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x12, + 0x1a, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x41, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x47, 0x65, + 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, + 0x6f, 0x62, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x17, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x47, 0x65, 0x74, + 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x44, 0x0a, 0x0a, 0x53, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1a, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x53, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x12, 0x44, 0x0a, 0x0a, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1a, + 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4c, + 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, + 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_service_proto_rawDescOnce sync.Once + file_service_proto_rawDescData = file_service_proto_rawDesc +) + +func file_service_proto_rawDescGZIP() []byte { + file_service_proto_rawDescOnce.Do(func() { + file_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_service_proto_rawDescData) + }) + return file_service_proto_rawDescData +} + +var file_service_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_service_proto_goTypes = []interface{}{ + (*FileGetRequest)(nil), // 0: service.FileGetRequest + (*FileGetReply)(nil), // 1: service.FileGetReply + (*FileEditRequest)(nil), // 2: service.FileEditRequest + (*FileEditReply)(nil), // 3: service.FileEditReply + (*FileMkdirRequest)(nil), // 4: service.FileMkdirRequest + (*FileMkdirReply)(nil), // 5: service.FileMkdirReply + (*FileDeleteRequest)(nil), // 6: service.FileDeleteRequest + (*FileDeleteReply)(nil), // 7: service.FileDeleteReply + (*FileListParentsRequest)(nil), // 8: service.FileListParentsRequest + (*FileListParentsReply)(nil), // 9: service.FileListParentsReply + (*TapeMGetRequest)(nil), // 10: service.TapeMGetRequest + (*TapeMGetReply)(nil), // 11: service.TapeMGetReply + (*JobListRequest)(nil), // 12: service.JobListRequest + (*JobMGetRequest)(nil), // 13: service.JobMGetRequest + (*JobListReply)(nil), // 14: service.JobListReply + (*JobCreateRequest)(nil), // 15: service.JobCreateRequest + (*JobCreateReply)(nil), // 16: service.JobCreateReply + (*JobNextRequest)(nil), // 17: service.JobNextRequest + 
(*JobNextReply)(nil), // 18: service.JobNextReply + (*JobDisplayRequest)(nil), // 19: service.JobDisplayRequest + (*JobDisplayReply)(nil), // 20: service.JobDisplayReply + (*JobGetLogRequest)(nil), // 21: service.JobGetLogRequest + (*JobGetLogReply)(nil), // 22: service.JobGetLogReply + (*SourceListRequest)(nil), // 23: service.SourceListRequest + (*SourceListReply)(nil), // 24: service.SourceListReply + (*DeviceListRequest)(nil), // 25: service.DeviceListRequest + (*DeviceListReply)(nil), // 26: service.DeviceListReply + (*File)(nil), // 27: file.File + (*Position)(nil), // 28: position.Position + (*EditedFile)(nil), // 29: file.EditedFile + (*Tape)(nil), // 30: tape.Tape + (*JobFilter)(nil), // 31: job.JobFilter + (*Job)(nil), // 32: job.Job + (*CreatableJob)(nil), // 33: job.CreatableJob + (*JobNextParam)(nil), // 34: job.JobNextParam + (*JobDisplay)(nil), // 35: job.JobDisplay + (*SourceFile)(nil), // 36: source.SourceFile +} +var file_service_proto_depIdxs = []int32{ + 27, // 0: service.FileGetReply.file:type_name -> file.File + 28, // 1: service.FileGetReply.positions:type_name -> position.Position + 27, // 2: service.FileGetReply.children:type_name -> file.File + 29, // 3: service.FileEditRequest.file:type_name -> file.EditedFile + 27, // 4: service.FileEditReply.file:type_name -> file.File + 27, // 5: service.FileMkdirReply.file:type_name -> file.File + 27, // 6: service.FileListParentsReply.parents:type_name -> file.File + 30, // 7: service.TapeMGetReply.tapes:type_name -> tape.Tape + 13, // 8: service.JobListRequest.mget:type_name -> service.JobMGetRequest + 31, // 9: service.JobListRequest.list:type_name -> job.JobFilter + 32, // 10: service.JobListReply.jobs:type_name -> job.Job + 33, // 11: service.JobCreateRequest.job:type_name -> job.CreatableJob + 32, // 12: service.JobCreateReply.job:type_name -> job.Job + 34, // 13: service.JobNextRequest.param:type_name -> job.JobNextParam + 32, // 14: service.JobNextReply.job:type_name -> job.Job + 35, // 15: service.JobDisplayReply.display:type_name -> job.JobDisplay + 36, // 16: service.SourceListReply.file:type_name -> source.SourceFile + 36, // 17: service.SourceListReply.chain:type_name -> source.SourceFile + 36, // 18: service.SourceListReply.children:type_name -> source.SourceFile + 0, // 19: service.Service.FileGet:input_type -> service.FileGetRequest + 2, // 20: service.Service.FileEdit:input_type -> service.FileEditRequest + 4, // 21: service.Service.FileMkdir:input_type -> service.FileMkdirRequest + 6, // 22: service.Service.FileDelete:input_type -> service.FileDeleteRequest + 8, // 23: service.Service.FileListParents:input_type -> service.FileListParentsRequest + 10, // 24: service.Service.TapeMGet:input_type -> service.TapeMGetRequest + 12, // 25: service.Service.JobList:input_type -> service.JobListRequest + 15, // 26: service.Service.JobCreate:input_type -> service.JobCreateRequest + 17, // 27: service.Service.JobNext:input_type -> service.JobNextRequest + 19, // 28: service.Service.JobDisplay:input_type -> service.JobDisplayRequest + 21, // 29: service.Service.JobGetLog:input_type -> service.JobGetLogRequest + 23, // 30: service.Service.SourceList:input_type -> service.SourceListRequest + 25, // 31: service.Service.DeviceList:input_type -> service.DeviceListRequest + 1, // 32: service.Service.FileGet:output_type -> service.FileGetReply + 3, // 33: service.Service.FileEdit:output_type -> service.FileEditReply + 5, // 34: service.Service.FileMkdir:output_type -> service.FileMkdirReply + 7, // 35: 
service.Service.FileDelete:output_type -> service.FileDeleteReply + 9, // 36: service.Service.FileListParents:output_type -> service.FileListParentsReply + 11, // 37: service.Service.TapeMGet:output_type -> service.TapeMGetReply + 14, // 38: service.Service.JobList:output_type -> service.JobListReply + 16, // 39: service.Service.JobCreate:output_type -> service.JobCreateReply + 18, // 40: service.Service.JobNext:output_type -> service.JobNextReply + 20, // 41: service.Service.JobDisplay:output_type -> service.JobDisplayReply + 22, // 42: service.Service.JobGetLog:output_type -> service.JobGetLogReply + 24, // 43: service.Service.SourceList:output_type -> service.SourceListReply + 26, // 44: service.Service.DeviceList:output_type -> service.DeviceListReply + 32, // [32:45] is the sub-list for method output_type + 19, // [19:32] is the sub-list for method input_type + 19, // [19:19] is the sub-list for extension type_name + 19, // [19:19] is the sub-list for extension extendee + 0, // [0:19] is the sub-list for field type_name +} + +func init() { file_service_proto_init() } +func file_service_proto_init() { + if File_service_proto != nil { + return + } + file_job_proto_init() + file_file_proto_init() + file_position_proto_init() + file_tape_proto_init() + file_source_proto_init() + if !protoimpl.UnsafeEnabled { + file_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileGetRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileGetReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileEditRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileEditReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileMkdirRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileMkdirReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileDeleteRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileDeleteReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileListParentsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + 
return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileListParentsReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TapeMGetRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TapeMGetReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobListRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobMGetRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobListReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobCreateRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobCreateReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobNextRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobNextReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobDisplayRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobDisplayReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobGetLogRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobGetLogReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + 
return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceListRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceListReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeviceListRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_service_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeviceListReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_service_proto_msgTypes[1].OneofWrappers = []interface{}{} + file_service_proto_msgTypes[12].OneofWrappers = []interface{}{ + (*JobListRequest_Mget)(nil), + (*JobListRequest_List)(nil), + } + file_service_proto_msgTypes[20].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_service_proto_rawDesc, + NumEnums: 0, + NumMessages: 27, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_service_proto_goTypes, + DependencyIndexes: file_service_proto_depIdxs, + MessageInfos: file_service_proto_msgTypes, + }.Build() + File_service_proto = out.File + file_service_proto_rawDesc = nil + file_service_proto_goTypes = nil + file_service_proto_depIdxs = nil +} diff --git a/entity/service.proto b/entity/service.proto new file mode 100644 index 0000000..477c81b --- /dev/null +++ b/entity/service.proto @@ -0,0 +1,145 @@ +syntax = "proto3"; +package service; +option go_package = "github.com/abc950309/tapewriter/entity"; + +import "job.proto"; +import "file.proto"; +import "position.proto"; +import "tape.proto"; +import "source.proto"; + +service Service { + rpc FileGet(FileGetRequest) returns (FileGetReply) {} + rpc FileEdit(FileEditRequest) returns (FileEditReply) {} + rpc FileMkdir(FileMkdirRequest) returns (FileMkdirReply) {} + rpc FileDelete(FileDeleteRequest) returns (FileDeleteReply) {} + rpc FileListParents(FileListParentsRequest) returns (FileListParentsReply) {} + + rpc TapeMGet(TapeMGetRequest) returns (TapeMGetReply) {} + + rpc JobList(JobListRequest) returns (JobListReply) {} + rpc JobCreate(JobCreateRequest) returns (JobCreateReply) {} + rpc JobNext(JobNextRequest) returns (JobNextReply) {} + rpc JobDisplay(JobDisplayRequest) returns (JobDisplayReply) {} + rpc JobGetLog(JobGetLogRequest) returns (JobGetLogReply) {} + + rpc SourceList(SourceListRequest) returns (SourceListReply) {} + + rpc DeviceList(DeviceListRequest) returns (DeviceListReply) {} +} + +message FileGetRequest { + int64 id = 1; +} + +message FileGetReply { + optional file.File file = 1; + repeated position.Position positions = 2; + repeated file.File children = 17; +} + +message FileEditRequest { + int64 id = 1; + file.EditedFile file = 2; +} + +message FileEditReply { + file.File file = 1; +} + +message FileMkdirRequest { + int64 parent_id = 1; + string path = 2; +} + +message FileMkdirReply { + file.File file = 1; +} + +message 
FileDeleteRequest { + repeated int64 ids = 1; +} + +message FileDeleteReply { +} + +message FileListParentsRequest { + int64 id = 1; +} + +message FileListParentsReply { + repeated file.File parents = 1; +} + +message TapeMGetRequest { + repeated int64 ids = 1; +} + +message TapeMGetReply { + repeated tape.Tape tapes = 1; +} + +message JobListRequest { + oneof param { + JobMGetRequest mget = 1; + job.JobFilter list = 2; + } +} + +message JobMGetRequest { + repeated int64 ids = 1; +} + +message JobListReply { + repeated job.Job jobs = 1; +} + +message JobCreateRequest { + job.CreatableJob job = 1; +} + +message JobCreateReply { + job.Job job = 1; +} + +message JobNextRequest { + int64 id = 1; + job.JobNextParam param = 2; +} + +message JobNextReply { + job.Job job = 1; +} + +message JobDisplayRequest { + int64 id = 1; +} + +message JobDisplayReply { + job.JobDisplay display = 1; +} + +message JobGetLogRequest { + int64 job_id = 1; + optional int64 offset = 2; +} + +message JobGetLogReply { + bytes logs = 1; +} + +message SourceListRequest { + string path = 1; +} + +message SourceListReply { + source.SourceFile file = 1; + repeated source.SourceFile chain = 2; + repeated source.SourceFile children = 17; +} + +message DeviceListRequest {} + +message DeviceListReply { + repeated string devices = 1; +} diff --git a/entity/service_gen.sh b/entity/service_gen.sh new file mode 100755 index 0000000..4325047 --- /dev/null +++ b/entity/service_gen.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -ex + +CURDIR=$(cd $(dirname $0); pwd); +cd ${CURDIR}; + +SRC_DIR=${CURDIR}; +GO_DST_DIR=${CURDIR}; +TS_DST_DIR=${CURDIR}/../frontend/src/apis; + +protoc --go_out=$GO_DST_DIR --go_opt=paths=source_relative \ + --go-grpc_out=$GO_DST_DIR --go-grpc_opt=paths=source_relative \ + -I=$SRC_DIR `ls *.proto`; + + # --js_out=import_style=es6,binary:$TS_DST_DIR \ + # --grpc-web_out=import_style=typescript,mode=grpcwebtext:$TS_DST_DIR \ diff --git a/entity/service_grpc.pb.go b/entity/service_grpc.pb.go new file mode 100644 index 0000000..ec48f93 --- /dev/null +++ b/entity/service_grpc.pb.go @@ -0,0 +1,537 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.10 +// source: service.proto + +package entity + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// ServiceClient is the client API for Service service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
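+//
+// A minimal call-site sketch (illustrative only; the listen address and the
+// use of insecure transport credentials are assumptions, not part of this
+// patch):
+//
+//	conn, err := grpc.Dial("127.0.0.1:5234", grpc.WithTransportCredentials(insecure.NewCredentials()))
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	defer conn.Close()
+//
+//	client := NewServiceClient(conn)
+//	reply, err := client.DeviceList(context.Background(), &DeviceListRequest{})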
+type ServiceClient interface { + FileGet(ctx context.Context, in *FileGetRequest, opts ...grpc.CallOption) (*FileGetReply, error) + FileEdit(ctx context.Context, in *FileEditRequest, opts ...grpc.CallOption) (*FileEditReply, error) + FileMkdir(ctx context.Context, in *FileMkdirRequest, opts ...grpc.CallOption) (*FileMkdirReply, error) + FileDelete(ctx context.Context, in *FileDeleteRequest, opts ...grpc.CallOption) (*FileDeleteReply, error) + FileListParents(ctx context.Context, in *FileListParentsRequest, opts ...grpc.CallOption) (*FileListParentsReply, error) + TapeMGet(ctx context.Context, in *TapeMGetRequest, opts ...grpc.CallOption) (*TapeMGetReply, error) + JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error) + JobCreate(ctx context.Context, in *JobCreateRequest, opts ...grpc.CallOption) (*JobCreateReply, error) + JobNext(ctx context.Context, in *JobNextRequest, opts ...grpc.CallOption) (*JobNextReply, error) + JobDisplay(ctx context.Context, in *JobDisplayRequest, opts ...grpc.CallOption) (*JobDisplayReply, error) + JobGetLog(ctx context.Context, in *JobGetLogRequest, opts ...grpc.CallOption) (*JobGetLogReply, error) + SourceList(ctx context.Context, in *SourceListRequest, opts ...grpc.CallOption) (*SourceListReply, error) + DeviceList(ctx context.Context, in *DeviceListRequest, opts ...grpc.CallOption) (*DeviceListReply, error) +} + +type serviceClient struct { + cc grpc.ClientConnInterface +} + +func NewServiceClient(cc grpc.ClientConnInterface) ServiceClient { + return &serviceClient{cc} +} + +func (c *serviceClient) FileGet(ctx context.Context, in *FileGetRequest, opts ...grpc.CallOption) (*FileGetReply, error) { + out := new(FileGetReply) + err := c.cc.Invoke(ctx, "/service.Service/FileGet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) FileEdit(ctx context.Context, in *FileEditRequest, opts ...grpc.CallOption) (*FileEditReply, error) { + out := new(FileEditReply) + err := c.cc.Invoke(ctx, "/service.Service/FileEdit", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) FileMkdir(ctx context.Context, in *FileMkdirRequest, opts ...grpc.CallOption) (*FileMkdirReply, error) { + out := new(FileMkdirReply) + err := c.cc.Invoke(ctx, "/service.Service/FileMkdir", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) FileDelete(ctx context.Context, in *FileDeleteRequest, opts ...grpc.CallOption) (*FileDeleteReply, error) { + out := new(FileDeleteReply) + err := c.cc.Invoke(ctx, "/service.Service/FileDelete", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) FileListParents(ctx context.Context, in *FileListParentsRequest, opts ...grpc.CallOption) (*FileListParentsReply, error) { + out := new(FileListParentsReply) + err := c.cc.Invoke(ctx, "/service.Service/FileListParents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) TapeMGet(ctx context.Context, in *TapeMGetRequest, opts ...grpc.CallOption) (*TapeMGetReply, error) { + out := new(TapeMGetReply) + err := c.cc.Invoke(ctx, "/service.Service/TapeMGet", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error) { + out := new(JobListReply) + err := c.cc.Invoke(ctx, "/service.Service/JobList", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) JobCreate(ctx context.Context, in *JobCreateRequest, opts ...grpc.CallOption) (*JobCreateReply, error) { + out := new(JobCreateReply) + err := c.cc.Invoke(ctx, "/service.Service/JobCreate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) JobNext(ctx context.Context, in *JobNextRequest, opts ...grpc.CallOption) (*JobNextReply, error) { + out := new(JobNextReply) + err := c.cc.Invoke(ctx, "/service.Service/JobNext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) JobDisplay(ctx context.Context, in *JobDisplayRequest, opts ...grpc.CallOption) (*JobDisplayReply, error) { + out := new(JobDisplayReply) + err := c.cc.Invoke(ctx, "/service.Service/JobDisplay", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) JobGetLog(ctx context.Context, in *JobGetLogRequest, opts ...grpc.CallOption) (*JobGetLogReply, error) { + out := new(JobGetLogReply) + err := c.cc.Invoke(ctx, "/service.Service/JobGetLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) SourceList(ctx context.Context, in *SourceListRequest, opts ...grpc.CallOption) (*SourceListReply, error) { + out := new(SourceListReply) + err := c.cc.Invoke(ctx, "/service.Service/SourceList", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceClient) DeviceList(ctx context.Context, in *DeviceListRequest, opts ...grpc.CallOption) (*DeviceListReply, error) { + out := new(DeviceListReply) + err := c.cc.Invoke(ctx, "/service.Service/DeviceList", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServiceServer is the server API for Service service. +// All implementations must embed UnimplementedServiceServer +// for forward compatibility +type ServiceServer interface { + FileGet(context.Context, *FileGetRequest) (*FileGetReply, error) + FileEdit(context.Context, *FileEditRequest) (*FileEditReply, error) + FileMkdir(context.Context, *FileMkdirRequest) (*FileMkdirReply, error) + FileDelete(context.Context, *FileDeleteRequest) (*FileDeleteReply, error) + FileListParents(context.Context, *FileListParentsRequest) (*FileListParentsReply, error) + TapeMGet(context.Context, *TapeMGetRequest) (*TapeMGetReply, error) + JobList(context.Context, *JobListRequest) (*JobListReply, error) + JobCreate(context.Context, *JobCreateRequest) (*JobCreateReply, error) + JobNext(context.Context, *JobNextRequest) (*JobNextReply, error) + JobDisplay(context.Context, *JobDisplayRequest) (*JobDisplayReply, error) + JobGetLog(context.Context, *JobGetLogRequest) (*JobGetLogReply, error) + SourceList(context.Context, *SourceListRequest) (*SourceListReply, error) + DeviceList(context.Context, *DeviceListRequest) (*DeviceListReply, error) + mustEmbedUnimplementedServiceServer() +} + +// UnimplementedServiceServer must be embedded to have forward compatible implementations. 
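+//
+// A concrete implementation would embed it and override the methods it
+// actually serves, for example (sketch; the apiServer name and the returned
+// device path are hypothetical):
+//
+//	type apiServer struct {
+//		UnimplementedServiceServer
+//	}
+//
+//	func (s *apiServer) DeviceList(ctx context.Context, _ *DeviceListRequest) (*DeviceListReply, error) {
+//		return &DeviceListReply{Devices: []string{"/dev/nst0"}}, nil
+//	}
+//
+//	grpcServer := grpc.NewServer()
+//	RegisterServiceServer(grpcServer, &apiServer{})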
+type UnimplementedServiceServer struct { +} + +func (UnimplementedServiceServer) FileGet(context.Context, *FileGetRequest) (*FileGetReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method FileGet not implemented") +} +func (UnimplementedServiceServer) FileEdit(context.Context, *FileEditRequest) (*FileEditReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method FileEdit not implemented") +} +func (UnimplementedServiceServer) FileMkdir(context.Context, *FileMkdirRequest) (*FileMkdirReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method FileMkdir not implemented") +} +func (UnimplementedServiceServer) FileDelete(context.Context, *FileDeleteRequest) (*FileDeleteReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method FileDelete not implemented") +} +func (UnimplementedServiceServer) FileListParents(context.Context, *FileListParentsRequest) (*FileListParentsReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method FileListParents not implemented") +} +func (UnimplementedServiceServer) TapeMGet(context.Context, *TapeMGetRequest) (*TapeMGetReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method TapeMGet not implemented") +} +func (UnimplementedServiceServer) JobList(context.Context, *JobListRequest) (*JobListReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method JobList not implemented") +} +func (UnimplementedServiceServer) JobCreate(context.Context, *JobCreateRequest) (*JobCreateReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method JobCreate not implemented") +} +func (UnimplementedServiceServer) JobNext(context.Context, *JobNextRequest) (*JobNextReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method JobNext not implemented") +} +func (UnimplementedServiceServer) JobDisplay(context.Context, *JobDisplayRequest) (*JobDisplayReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method JobDisplay not implemented") +} +func (UnimplementedServiceServer) JobGetLog(context.Context, *JobGetLogRequest) (*JobGetLogReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method JobGetLog not implemented") +} +func (UnimplementedServiceServer) SourceList(context.Context, *SourceListRequest) (*SourceListReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method SourceList not implemented") +} +func (UnimplementedServiceServer) DeviceList(context.Context, *DeviceListRequest) (*DeviceListReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeviceList not implemented") +} +func (UnimplementedServiceServer) mustEmbedUnimplementedServiceServer() {} + +// UnsafeServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ServiceServer will +// result in compilation errors. 
+type UnsafeServiceServer interface { + mustEmbedUnimplementedServiceServer() +} + +func RegisterServiceServer(s grpc.ServiceRegistrar, srv ServiceServer) { + s.RegisterService(&Service_ServiceDesc, srv) +} + +func _Service_FileGet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FileGetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).FileGet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/FileGet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).FileGet(ctx, req.(*FileGetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_FileEdit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FileEditRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).FileEdit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/FileEdit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).FileEdit(ctx, req.(*FileEditRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_FileMkdir_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FileMkdirRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).FileMkdir(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/FileMkdir", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).FileMkdir(ctx, req.(*FileMkdirRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_FileDelete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FileDeleteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).FileDelete(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/FileDelete", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).FileDelete(ctx, req.(*FileDeleteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_FileListParents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FileListParentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).FileListParents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/FileListParents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).FileListParents(ctx, req.(*FileListParentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_TapeMGet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TapeMGetRequest) 
+ if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).TapeMGet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/TapeMGet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).TapeMGet(ctx, req.(*TapeMGetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_JobList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(JobListRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).JobList(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/JobList", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).JobList(ctx, req.(*JobListRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_JobCreate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(JobCreateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).JobCreate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/JobCreate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).JobCreate(ctx, req.(*JobCreateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_JobNext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(JobNextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).JobNext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/JobNext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).JobNext(ctx, req.(*JobNextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_JobDisplay_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(JobDisplayRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).JobDisplay(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/JobDisplay", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).JobDisplay(ctx, req.(*JobDisplayRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_JobGetLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(JobGetLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).JobGetLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/JobGetLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).JobGetLog(ctx, req.(*JobGetLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_Service_SourceList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SourceListRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).SourceList(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/SourceList", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).SourceList(ctx, req.(*SourceListRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Service_DeviceList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeviceListRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).DeviceList(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Service/DeviceList", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).DeviceList(ctx, req.(*DeviceListRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Service_ServiceDesc is the grpc.ServiceDesc for Service service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Service_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "service.Service", + HandlerType: (*ServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "FileGet", + Handler: _Service_FileGet_Handler, + }, + { + MethodName: "FileEdit", + Handler: _Service_FileEdit_Handler, + }, + { + MethodName: "FileMkdir", + Handler: _Service_FileMkdir_Handler, + }, + { + MethodName: "FileDelete", + Handler: _Service_FileDelete_Handler, + }, + { + MethodName: "FileListParents", + Handler: _Service_FileListParents_Handler, + }, + { + MethodName: "TapeMGet", + Handler: _Service_TapeMGet_Handler, + }, + { + MethodName: "JobList", + Handler: _Service_JobList_Handler, + }, + { + MethodName: "JobCreate", + Handler: _Service_JobCreate_Handler, + }, + { + MethodName: "JobNext", + Handler: _Service_JobNext_Handler, + }, + { + MethodName: "JobDisplay", + Handler: _Service_JobDisplay_Handler, + }, + { + MethodName: "JobGetLog", + Handler: _Service_JobGetLog_Handler, + }, + { + MethodName: "SourceList", + Handler: _Service_SourceList_Handler, + }, + { + MethodName: "DeviceList", + Handler: _Service_DeviceList_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "service.proto", +} diff --git a/entity/source.go b/entity/source.go new file mode 100644 index 0000000..930a116 --- /dev/null +++ b/entity/source.go @@ -0,0 +1,72 @@ +package entity + +import ( + "path" + + "github.com/abc950309/acp" +) + +func NewSourceFromACPJob(job *acp.Job) *Source { + return &Source{Base: job.Base, Path: job.Path} +} + +func (x *Source) RealPath() string { + return x.Base + path.Join(x.Path...) 
+} + +func (x *Source) Append(more ...string) *Source { + path := make([]string, len(x.Path)+len(more)) + copy(path, x.Path) + copy(path[len(x.Path):], more) + + return &Source{Base: x.Base, Path: path} +} + +func (x *Source) Compare(xx *Source) int { + la, lb := len(x.Path), len(xx.Path) + + l := la + if lb < la { + l = lb + } + + for idx := 0; idx < l; idx++ { + if x.Path[idx] < xx.Path[idx] { + return -1 + } + if x.Path[idx] > xx.Path[idx] { + return 1 + } + } + + if la < lb { + return -1 + } + if la > lb { + return 1 + } + + if x.Base < xx.Base { + return -1 + } + if x.Base > xx.Base { + return 1 + } + + return 0 +} + +func (x *Source) Equal(xx *Source) bool { + la, lb := len(x.Path), len(xx.Path) + if la != lb { + return false + } + + for idx := 0; idx < la; idx++ { + if x.Path[idx] != xx.Path[idx] { + return false + } + } + + return true +} diff --git a/entity/source.pb.go b/entity/source.pb.go new file mode 100644 index 0000000..4a6bb2e --- /dev/null +++ b/entity/source.pb.go @@ -0,0 +1,362 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: source.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SourceFile struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + ParentPath string `protobuf:"bytes,2,opt,name=parent_path,json=parentPath,proto3" json:"parent_path,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"` + ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"` + Size int64 `protobuf:"varint,19,opt,name=size,proto3" json:"size,omitempty"` +} + +func (x *SourceFile) Reset() { + *x = SourceFile{} + if protoimpl.UnsafeEnabled { + mi := &file_source_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceFile) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceFile) ProtoMessage() {} + +func (x *SourceFile) ProtoReflect() protoreflect.Message { + mi := &file_source_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceFile.ProtoReflect.Descriptor instead.
+func (*SourceFile) Descriptor() ([]byte, []int) { + return file_source_proto_rawDescGZIP(), []int{0} +} + +func (x *SourceFile) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *SourceFile) GetParentPath() string { + if x != nil { + return x.ParentPath + } + return "" +} + +func (x *SourceFile) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *SourceFile) GetMode() int64 { + if x != nil { + return x.Mode + } + return 0 +} + +func (x *SourceFile) GetModTime() int64 { + if x != nil { + return x.ModTime + } + return 0 +} + +func (x *SourceFile) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +type Source struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base string `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Path []string `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` +} + +func (x *Source) Reset() { + *x = Source{} + if protoimpl.UnsafeEnabled { + mi := &file_source_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Source) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Source) ProtoMessage() {} + +func (x *Source) ProtoReflect() protoreflect.Message { + mi := &file_source_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Source.ProtoReflect.Descriptor instead. +func (*Source) Descriptor() ([]byte, []int) { + return file_source_proto_rawDescGZIP(), []int{1} +} + +func (x *Source) GetBase() string { + if x != nil { + return x.Base + } + return "" +} + +func (x *Source) GetPath() []string { + if x != nil { + return x.Path + } + return nil +} + +type SourceState struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Source *Source `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` + Status CopyStatus `protobuf:"varint,3,opt,name=status,proto3,enum=copy_status.CopyStatus" json:"status,omitempty"` + Message *string `protobuf:"bytes,4,opt,name=message,proto3,oneof" json:"message,omitempty"` +} + +func (x *SourceState) Reset() { + *x = SourceState{} + if protoimpl.UnsafeEnabled { + mi := &file_source_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceState) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceState) ProtoMessage() {} + +func (x *SourceState) ProtoReflect() protoreflect.Message { + mi := &file_source_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceState.ProtoReflect.Descriptor instead. 
+func (*SourceState) Descriptor() ([]byte, []int) { + return file_source_proto_rawDescGZIP(), []int{2} +} + +func (x *SourceState) GetSource() *Source { + if x != nil { + return x.Source + } + return nil +} + +func (x *SourceState) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *SourceState) GetStatus() CopyStatus { + if x != nil { + return x.Status + } + return CopyStatus_Draft +} + +func (x *SourceState) GetMessage() string { + if x != nil && x.Message != nil { + return *x.Message + } + return "" +} + +var File_source_proto protoreflect.FileDescriptor + +var file_source_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x98, 0x01, 0x0a, 0x0a, 0x53, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d, + 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65, + 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, + 0x73, 0x69, 0x7a, 0x65, 0x22, 0x30, 0x0a, 0x06, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x62, 0x61, + 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0xa5, 0x01, 0x0a, 0x0b, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x26, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, + 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, + 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, + 0x7a, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x2e, 0x43, 0x6f, 0x70, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, + 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, 0x28, + 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, + 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, + 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_source_proto_rawDescOnce sync.Once + 
file_source_proto_rawDescData = file_source_proto_rawDesc +) + +func file_source_proto_rawDescGZIP() []byte { + file_source_proto_rawDescOnce.Do(func() { + file_source_proto_rawDescData = protoimpl.X.CompressGZIP(file_source_proto_rawDescData) + }) + return file_source_proto_rawDescData +} + +var file_source_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_source_proto_goTypes = []interface{}{ + (*SourceFile)(nil), // 0: source.SourceFile + (*Source)(nil), // 1: source.Source + (*SourceState)(nil), // 2: source.SourceState + (CopyStatus)(0), // 3: copy_status.CopyStatus +} +var file_source_proto_depIdxs = []int32{ + 1, // 0: source.SourceState.source:type_name -> source.Source + 3, // 1: source.SourceState.status:type_name -> copy_status.CopyStatus + 2, // [2:2] is the sub-list for method output_type + 2, // [2:2] is the sub-list for method input_type + 2, // [2:2] is the sub-list for extension type_name + 2, // [2:2] is the sub-list for extension extendee + 0, // [0:2] is the sub-list for field type_name +} + +func init() { file_source_proto_init() } +func file_source_proto_init() { + if File_source_proto != nil { + return + } + file_copy_status_proto_init() + if !protoimpl.UnsafeEnabled { + file_source_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceFile); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_source_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Source); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_source_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_source_proto_msgTypes[2].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_source_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_source_proto_goTypes, + DependencyIndexes: file_source_proto_depIdxs, + MessageInfos: file_source_proto_msgTypes, + }.Build() + File_source_proto = out.File + file_source_proto_rawDesc = nil + file_source_proto_goTypes = nil + file_source_proto_depIdxs = nil +} diff --git a/entity/source.proto b/entity/source.proto new file mode 100644 index 0000000..2eee6de --- /dev/null +++ b/entity/source.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; +package source; + +option go_package = "github.com/abc950309/tapewriter/entity"; + +import "copy_status.proto"; + +message SourceFile { + string path = 1; + string parent_path = 2; + string name = 3; + + int64 mode = 17; + int64 mod_time = 18; + int64 size = 19; +} + +message Source { + string base = 1; + repeated string path = 2; +} + +message SourceState { + Source source = 1; + int64 size = 2; + copy_status.CopyStatus status = 3; + optional string message = 4; +} diff --git a/entity/tape.pb.go b/entity/tape.pb.go new file mode 100644 index 0000000..0ddd973 --- /dev/null +++ b/entity/tape.pb.go @@ -0,0 +1,214 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.10 +// source: tape.proto + +package entity + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Tape struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + Barcode string `protobuf:"bytes,2,opt,name=barcode,proto3" json:"barcode,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Encryption string `protobuf:"bytes,4,opt,name=encryption,proto3" json:"encryption,omitempty"` + CreateTime int64 `protobuf:"varint,17,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + DestroyTime *int64 `protobuf:"varint,18,opt,name=destroy_time,json=destroyTime,proto3,oneof" json:"destroy_time,omitempty"` + CapacityBytes int64 `protobuf:"varint,19,opt,name=capacity_bytes,json=capacityBytes,proto3" json:"capacity_bytes,omitempty"` + WritenBytes int64 `protobuf:"varint,20,opt,name=writen_bytes,json=writenBytes,proto3" json:"writen_bytes,omitempty"` +} + +func (x *Tape) Reset() { + *x = Tape{} + if protoimpl.UnsafeEnabled { + mi := &file_tape_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Tape) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Tape) ProtoMessage() {} + +func (x *Tape) ProtoReflect() protoreflect.Message { + mi := &file_tape_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Tape.ProtoReflect.Descriptor instead. 
+func (*Tape) Descriptor() ([]byte, []int) { + return file_tape_proto_rawDescGZIP(), []int{0} +} + +func (x *Tape) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Tape) GetBarcode() string { + if x != nil { + return x.Barcode + } + return "" +} + +func (x *Tape) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Tape) GetEncryption() string { + if x != nil { + return x.Encryption + } + return "" +} + +func (x *Tape) GetCreateTime() int64 { + if x != nil { + return x.CreateTime + } + return 0 +} + +func (x *Tape) GetDestroyTime() int64 { + if x != nil && x.DestroyTime != nil { + return *x.DestroyTime + } + return 0 +} + +func (x *Tape) GetCapacityBytes() int64 { + if x != nil { + return x.CapacityBytes + } + return 0 +} + +func (x *Tape) GetWritenBytes() int64 { + if x != nil { + return x.WritenBytes + } + return 0 +} + +var File_tape_proto protoreflect.FileDescriptor + +var file_tape_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x74, 0x61, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x74, 0x61, + 0x70, 0x65, 0x22, 0x88, 0x02, 0x0a, 0x04, 0x54, 0x61, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x62, + 0x61, 0x72, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x61, + 0x72, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x6e, 0x63, + 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, + 0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0c, 0x64, 0x65, + 0x73, 0x74, 0x72, 0x6f, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, + 0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x88, + 0x01, 0x01, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x5f, 0x62, + 0x79, 0x74, 0x65, 0x73, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x63, 0x61, 0x70, 0x61, + 0x63, 0x69, 0x74, 0x79, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x72, 0x69, + 0x74, 0x65, 0x6e, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0b, 0x77, 0x72, 0x69, 0x74, 0x65, 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, 0x42, 0x0f, 0x0a, 0x0d, + 0x5f, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x42, 0x28, 0x5a, + 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, + 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, + 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_tape_proto_rawDescOnce sync.Once + file_tape_proto_rawDescData = file_tape_proto_rawDesc +) + +func file_tape_proto_rawDescGZIP() []byte { + file_tape_proto_rawDescOnce.Do(func() { + file_tape_proto_rawDescData = protoimpl.X.CompressGZIP(file_tape_proto_rawDescData) + }) + return file_tape_proto_rawDescData +} + +var file_tape_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_tape_proto_goTypes = []interface{}{ + (*Tape)(nil), // 0: tape.Tape +} +var file_tape_proto_depIdxs = []int32{ + 
0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_tape_proto_init() } +func file_tape_proto_init() { + if File_tape_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_tape_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Tape); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_tape_proto_msgTypes[0].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_tape_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_tape_proto_goTypes, + DependencyIndexes: file_tape_proto_depIdxs, + MessageInfos: file_tape_proto_msgTypes, + }.Build() + File_tape_proto = out.File + file_tape_proto_rawDesc = nil + file_tape_proto_goTypes = nil + file_tape_proto_depIdxs = nil +} diff --git a/entity/tape.proto b/entity/tape.proto new file mode 100644 index 0000000..e391a8c --- /dev/null +++ b/entity/tape.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; +package tape; +option go_package = "github.com/abc950309/tapewriter/entity"; + +message Tape { + int64 id = 1; + string barcode = 2; + string name = 3; + string encryption = 4; + + int64 create_time = 17; + optional int64 destroy_time = 18; + int64 capacity_bytes = 19; + int64 writen_bytes = 20; +} diff --git a/entity/utils.go b/entity/utils.go new file mode 100644 index 0000000..1a024ef --- /dev/null +++ b/entity/utils.go @@ -0,0 +1,73 @@ +package entity + +import ( + "database/sql/driver" + "fmt" + reflect "reflect" + sync "sync" + + "github.com/modern-go/reflect2" + "google.golang.org/protobuf/proto" +) + +var ( + typeMap sync.Map +) + +// Scan implement database/sql.Scanner +func Scan(dst proto.Message, src interface{}) error { + cacheKey := reflect2.RTypeOf(dst) + typ, has := loadType(cacheKey) + if !has { + ptrType := reflect.TypeOf(dst) + if ptrType.Kind() != reflect.Ptr { + return fmt.Errorf("scan dst is not an ptr, has= %T", dst) + } + + typ = reflect2.Type2(ptrType.Elem()) + storeType(cacheKey, typ) + } + typ.Set(dst, typ.New()) + + var buf []byte + switch v := src.(type) { + case string: + buf = []byte(v) + case []byte: + buf = v + case nil: + return nil + default: + return fmt.Errorf("process define extra scanner, unexpected type for i18n, %T", v) + } + + if len(buf) == 0 { + return nil + } + + if err := proto.Unmarshal(buf, dst); err != nil { + return fmt.Errorf("process define extra scanner, json unmarshal fail, %w", err) + } + return nil +} + +// Value implement database/sql/driver.Valuer +func Value(src proto.Message) (driver.Value, error) { + buf, err := proto.Marshal(src) + if err != nil { + return nil, fmt.Errorf("process define extra valuer, json marshal fail, %w", err) + } + return buf, nil +} + +func loadType(key uintptr) (reflect2.Type, bool) { + i, has := typeMap.Load(key) + if !has { + return nil, false + } + return i.(reflect2.Type), true +} + +func storeType(key uintptr, typ reflect2.Type) { + typeMap.Store(key, typ) +} diff --git a/executor/executor.go b/executor/executor.go new file mode 100644 index 0000000..2ac7859 --- /dev/null +++ b/executor/executor.go @@ -0,0 +1,131 @@ +package 
executor + +import ( + "context" + "fmt" + "sort" + "sync" + + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/library" + mapset "github.com/deckarep/golang-set/v2" + "gorm.io/gorm" +) + +type Executor struct { + db *gorm.DB + lib *library.Library + + devices []string + + devicesLock sync.Mutex + availableDevices mapset.Set[string] + + workDirectory string + encryptScript string + mkfsScript string + mountScript string + umountScript string +} + +func New( + db *gorm.DB, lib *library.Library, + devices []string, workDirectory string, + encryptScript, mkfsScript, mountScript, umountScript string, +) *Executor { + return &Executor{ + db: db, + lib: lib, + devices: devices, + availableDevices: mapset.NewThreadUnsafeSet(devices...), + encryptScript: encryptScript, + mkfsScript: mkfsScript, + mountScript: mountScript, + umountScript: umountScript, + } +} + +func (e *Executor) AutoMigrate() error { + return e.db.AutoMigrate(ModelJob) +} + +func (e *Executor) ListAvailableDevices() []string { + e.devicesLock.Lock() + defer e.devicesLock.Unlock() + + devices := e.availableDevices.ToSlice() + sort.Slice(devices, func(i, j int) bool { + return devices[i] < devices[j] + }) + + return devices +} + +func (e *Executor) occupyDevice(dev string) bool { + e.devicesLock.Lock() + defer e.devicesLock.Unlock() + + if !e.availableDevices.Contains(dev) { + return false + } + + e.availableDevices.Remove(dev) + return true +} + +func (e *Executor) releaseDevice(dev string) { + e.devicesLock.Lock() + defer e.devicesLock.Unlock() + e.availableDevices.Add(dev) +} + +func (e *Executor) Start(ctx context.Context, job *Job) error { + job.Status = entity.JobStatus_Processing + if _, err := e.SaveJob(ctx, job); err != nil { + return err + } + + if state := job.State.GetArchive(); state != nil { + if err := e.startArchive(ctx, job); err != nil { + return err + } + return nil + } + + return fmt.Errorf("unexpected state type, %T", job.State.State) +} + +func (e *Executor) Submit(ctx context.Context, job *Job, param *entity.JobNextParam) error { + if job.Status != entity.JobStatus_Processing { + return fmt.Errorf("target job is not on processing, status= %s", job.Status) + } + + if state := job.State.GetArchive(); state != nil { + exe, err := e.newArchiveExecutor(ctx, job) + if err != nil { + return err + } + + exe.submit(param.GetArchive()) + return nil + } + + return fmt.Errorf("unexpected state type, %T", job.State.State) +} + +func (e *Executor) Display(ctx context.Context, job *Job) (*entity.JobDisplay, error) { + if job.Status != entity.JobStatus_Processing { + return nil, fmt.Errorf("target job is not on processing, status= %s", job.Status) + } + + if state := job.State.GetArchive(); state != nil { + display, err := e.getArchiveDisplay(ctx, job) + if err != nil { + return nil, err + } + + return &entity.JobDisplay{Display: &entity.JobDisplay_Archive{Archive: display}}, nil + } + + return nil, fmt.Errorf("unexpected state type, %T", job.State.State) +} diff --git a/executor/job.go b/executor/job.go new file mode 100644 index 0000000..53792ae --- /dev/null +++ b/executor/job.go @@ -0,0 +1,132 @@ +package executor + +import ( + "context" + "fmt" + "time" + + "github.com/abc950309/tapewriter/entity" + "gorm.io/gorm" +) + +var ( + ModelJob = &Job{} + + ErrJobNotFound = fmt.Errorf("get job: job not found") +) + +type Job struct { + ID int64 `gorm:"primaryKey;autoIncrement"` + Status entity.JobStatus + Priority int64 + State *entity.JobState + + CreateTime time.Time + UpdateTime time.Time +} + 
+func (j *Job) BeforeUpdate(tx *gorm.DB) error { + j.UpdateTime = time.Now() + if j.CreateTime.IsZero() { + j.CreateTime = j.UpdateTime + } + return nil +} + +func (e *Executor) initJob(ctx context.Context, job *Job, param *entity.JobParam) error { + if p := param.GetArchive(); p != nil { + return e.initArchive(ctx, job, p) + } + return fmt.Errorf("unexpected param type, %T", param.Param) +} + +func (e *Executor) CreateJob(ctx context.Context, job *Job, param *entity.JobParam) (*Job, error) { + if err := e.initJob(ctx, job, param); err != nil { + return nil, err + } + + if r := e.db.WithContext(ctx).Create(job); r.Error != nil { + return nil, fmt.Errorf("save job fail, err= %w", r.Error) + } + + return job, nil +} + +func (e *Executor) SaveJob(ctx context.Context, job *Job) (*Job, error) { + if r := e.db.WithContext(ctx).Save(job); r.Error != nil { + return nil, fmt.Errorf("save job fail, err= %w", r.Error) + } + return job, nil +} + +func (e *Executor) MGetJob(ctx context.Context, ids ...int64) (map[int64]*Job, error) { + if len(ids) == 0 { + return map[int64]*Job{}, nil + } + + jobs := make([]*Job, 0, len(ids)) + if r := e.db.WithContext(ctx).Where("id IN (?)", ids).Find(&jobs); r.Error != nil { + return nil, fmt.Errorf("list jobs fail, err= %w", r.Error) + } + + result := make(map[int64]*Job, len(jobs)) + for _, job := range jobs { + result[job.ID] = job + } + + return result, nil +} + +func (e *Executor) GetJob(ctx context.Context, id int64) (*Job, error) { + jobs, err := e.MGetJob(ctx, id) + if err != nil { + return nil, err + } + + job, ok := jobs[id] + if !ok || job == nil { + return nil, ErrJobNotFound + } + + return job, nil +} + +// func (e *Executor) getNextJob(ctx context.Context) (*Job, error) { +// job := new(Job) +// if r := e.db.WithContext(ctx). +// Where("status = ?", entity.JobStatus_Pending). +// Order("priority DESC, create_time ASC"). 
+// Limit(1).First(job); r.Error != nil { +// if errors.Is(r.Error, gorm.ErrRecordNotFound) { +// return nil, nil +// } +// return nil, r.Error +// } + +// return job, nil +// } + +func (e *Executor) ListJob(ctx context.Context, filter *entity.JobFilter) ([]*Job, error) { + db := e.db.WithContext(ctx) + if filter.Status != nil { + db.Where("status = ?", *filter.Status) + } + + if filter.Limit != nil { + db.Limit(int(*filter.Limit)) + } else { + db.Limit(20) + } + if filter.Offset != nil { + db.Offset(int(*filter.Offset)) + } + + db.Order("create_time DESC") + + jobs := make([]*Job, 0, 20) + if r := db.Find(&jobs); r.Error != nil { + return nil, fmt.Errorf("list jobs fail, err= %w", r.Error) + } + + return jobs, nil +} diff --git a/executor/job_archive_display.go b/executor/job_archive_display.go new file mode 100644 index 0000000..a5c9a1c --- /dev/null +++ b/executor/job_archive_display.go @@ -0,0 +1,22 @@ +package executor + +import ( + "context" + "sync/atomic" + + "github.com/abc950309/tapewriter/entity" +) + +func (e *Executor) getArchiveDisplay(ctx context.Context, job *Job) (*entity.JobDisplayArchive, error) { + display := new(entity.JobDisplayArchive) + + if exe := e.getArchiveExecutor(ctx, job); exe != nil && exe.progress != nil { + display.CopyedBytes = atomic.LoadInt64(&exe.progress.bytes) + display.CopyedFiles = atomic.LoadInt64(&exe.progress.files) + display.TotalBytes = atomic.LoadInt64(&exe.progress.totalBytes) + display.TotalFiles = atomic.LoadInt64(&exe.progress.totalFiles) + display.Speed = atomic.LoadInt64(&exe.progress.speed) + } + + return display, nil +} diff --git a/executor/job_archive_exe.go b/executor/job_archive_exe.go new file mode 100644 index 0000000..4a6997a --- /dev/null +++ b/executor/job_archive_exe.go @@ -0,0 +1,357 @@ +package executor + +import ( + "context" + "encoding/hex" + "fmt" + "io" + "os" + "os/exec" + "path" + "sort" + "sync" + "sync/atomic" + "time" + + "github.com/abc950309/acp" + "github.com/abc950309/tapewriter/entity" + "github.com/abc950309/tapewriter/library" + "github.com/abc950309/tapewriter/tools" + mapset "github.com/deckarep/golang-set/v2" + "github.com/sirupsen/logrus" +) + +var ( + runningArchives sync.Map +) + +func (e *Executor) getArchiveExecutor(ctx context.Context, job *Job) *jobArchiveExecutor { + if running, has := runningArchives.Load(job.ID); has { + return running.(*jobArchiveExecutor) + } + return nil +} + +func (e *Executor) newArchiveExecutor(ctx context.Context, job *Job) (*jobArchiveExecutor, error) { + if exe := e.getArchiveExecutor(ctx, job); exe != nil { + return exe, nil + } + + logFile, err := e.newLogWriter(job.ID) + if err != nil { + return nil, fmt.Errorf("get log writer fail, %w", err) + } + + logger := logrus.New() + logger.SetOutput(io.MultiWriter(os.Stderr, logFile)) + + exe := &jobArchiveExecutor{ + ctx: context.Background(), + exe: e, + job: job, + + state: job.State.GetArchive(), + + progress: new(progress), + logFile: logFile, + logger: logger, + } + + runningArchives.Store(job.ID, exe) + return exe, nil +} + +type jobArchiveExecutor struct { + ctx context.Context + exe *Executor + job *Job + + stateLock sync.Mutex + state *entity.JobStateArchive + + progress *progress + logFile *os.File + logger *logrus.Logger +} + +func (a *jobArchiveExecutor) submit(param *entity.JobArchiveNextParam) { + if err := a.handle(param); err != nil { + a.logger.WithContext(a.ctx).Infof("handler param fail, err= %w", err) + } +} + +func (a *jobArchiveExecutor) handle(param *entity.JobArchiveNextParam) error { + if p := 
param.GetCopying(); p != nil { + if err := a.switchStep(entity.JobArchiveStep_Copying, entity.JobStatus_Processing, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_WaitForTape)); err != nil { + return err + } + + go tools.Wrap(a.ctx, func() { + _, err := a.makeTape(p.Device, p.Barcode, p.Name) + if err != nil { + a.logger.WithContext(a.ctx).WithError(err).Errorf("make type has error, barcode= '%s' name= '%s'", p.Barcode, p.Name) + } + }) + + return nil + } + + if p := param.GetWaitForTape(); p != nil { + return a.switchStep(entity.JobArchiveStep_WaitForTape, entity.JobStatus_Processing, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_Pending, entity.JobArchiveStep_Copying)) + } + + if p := param.GetFinished(); p != nil { + if err := a.switchStep(entity.JobArchiveStep_Finished, entity.JobStatus_Completed, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_Copying)); err != nil { + return err + } + + a.logFile.Close() + runningArchives.Delete(a.job.ID) + return nil + } + + return nil +} + +func (a *jobArchiveExecutor) makeTape(device, barcode, name string) (*library.Tape, error) { + if !a.exe.occupyDevice(device) { + return nil, fmt.Errorf("device is using, device= %s", device) + } + defer a.exe.releaseDevice(device) + defer a.makeTapeFinished() + + encryption, keyPath, keyRecycle, err := a.exe.newKey() + if err != nil { + return nil, err + } + defer func() { + time.Sleep(time.Second) + keyRecycle() + }() + + if err := runCmd(a.logger, a.exe.makeEncryptCmd(a.ctx, device, keyPath, barcode, name)); err != nil { + return nil, fmt.Errorf("run encrypt script fail, %w", err) + } + + mkfsCmd := exec.CommandContext(a.ctx, a.exe.mkfsScript) + mkfsCmd.Env = append(mkfsCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("TAPE_BARCODE=%s", barcode), fmt.Sprintf("TAPE_NAME=%s", name)) + if err := runCmd(a.logger, mkfsCmd); err != nil { + return nil, fmt.Errorf("run mkfs script fail, %w", err) + } + + mountPoint, err := os.MkdirTemp("", "*.ltfs") + if err != nil { + return nil, fmt.Errorf("create temp mountpoint, %w", err) + } + + mountCmd := exec.CommandContext(a.ctx, a.exe.mountScript) + mountCmd.Env = append(mountCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("MOUNT_POINT=%s", mountPoint)) + if err := runCmd(a.logger, mountCmd); err != nil { + return nil, fmt.Errorf("run mount script fail, %w", err) + } + defer func() { + umountCmd := exec.CommandContext(a.ctx, a.exe.umountScript) + umountCmd.Env = append(umountCmd.Env, fmt.Sprintf("MOUNT_POINT=%s", mountPoint)) + if err := runCmd(a.logger, umountCmd); err != nil { + a.logger.WithContext(a.ctx).WithError(err).Errorf("run umount script fail, %s", mountPoint) + return + } + if err := os.Remove(mountPoint); err != nil { + a.logger.WithContext(a.ctx).WithError(err).Errorf("remove mount point fail, %s", mountPoint) + return + } + }() + + opts := make([]acp.Option, 0, 4) + for _, source := range a.state.Sources { + if source.Status == entity.CopyStatus_Submited { + continue + } + opts = append(opts, acp.AccurateSource(source.Source.Base, source.Source.Path)) + } + + opts = append(opts, acp.Target(mountPoint)) + opts = append(opts, acp.WithHash(true)) + opts = append(opts, acp.SetToDevice(acp.LinearDevice(true))) + opts = append(opts, acp.WithLogger(a.logger)) + + reportHander, reportGetter := acp.NewReportGetter() + opts = append(opts, acp.WithEventHandler(reportHander)) + opts = append(opts, acp.WithEventHandler(func(ev acp.Event) { + switch e := ev.(type) { + case *acp.EventUpdateCount: + atomic.StoreInt64(&a.progress.totalBytes, e.Bytes) + 
atomic.StoreInt64(&a.progress.totalFiles, e.Files) + return + case *acp.EventUpdateProgress: + atomic.StoreInt64(&a.progress.bytes, e.Bytes) + atomic.StoreInt64(&a.progress.files, e.Files) + return + case *acp.EventUpdateJob: + job := e.Job + src := entity.NewSourceFromACPJob(job) + + var targetStatus entity.CopyStatus + switch job.Status { + case "pending": + targetStatus = entity.CopyStatus_Pending + case "preparing": + a.logger.Infof("file '%s' starts to prepare for copy, size= %d", src.RealPath(), job.Size) + targetStatus = entity.CopyStatus_Running + case "finished": + a.logger.Infof("file '%s' copy finished, size= %d", src.RealPath(), job.Size) + targetStatus = entity.CopyStatus_Staged + default: + return + } + + a.stateLock.Lock() + defer a.stateLock.Unlock() + + idx := sort.Search(len(a.state.Sources), func(idx int) bool { + return src.Compare(a.state.Sources[idx].Source) <= 0 + }) + + target := a.state.Sources[idx] + if target == nil || !src.Equal(target.Source) { + return + } + target.Status = targetStatus + + if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil { + logrus.WithContext(a.ctx).Infof("save job for update file fail, name= %s", job.Base+path.Join(job.Path...)) + } + return + } + })) + + copyer, err := acp.New(a.ctx, opts...) + if err != nil { + return nil, fmt.Errorf("start copy fail, %w", err) + } + copyer.Wait() + + report := reportGetter() + sort.Slice(report.Jobs, func(i, j int) bool { + return entity.NewSourceFromACPJob(report.Jobs[i]).Compare(entity.NewSourceFromACPJob(report.Jobs[j])) < 0 + }) + + filteredJobs := make([]*acp.Job, 0, len(report.Jobs)) + files := make([]*library.TapeFile, 0, len(report.Jobs)) + for _, job := range report.Jobs { + if len(job.SuccessTargets) == 0 { + continue + } + if !job.Mode.IsRegular() { + continue + } + + hash, err := hex.DecodeString(job.SHA256) + if err != nil { + return nil, fmt.Errorf("decode sha256 fail, err= %w", err) + } + + files = append(files, &library.TapeFile{ + Path: path.Join(job.Path...), + Size: job.Size, + Mode: job.Mode, + ModTime: job.ModTime, + WriteTime: job.WriteTime, + Hash: hash, + }) + filteredJobs = append(filteredJobs, job) + } + + tape, err := a.exe.lib.CreateTape(a.ctx, &library.Tape{ + Barcode: barcode, + Name: name, + Encryption: encryption, + CreateTime: time.Now(), + }, files) + if err != nil { + return nil, fmt.Errorf("create tape fail, barcode= '%s' name= '%s', %w", barcode, name, err) + } + if err := a.exe.lib.TrimFiles(a.ctx); err != nil { + a.logger.WithError(err).Warnf("trim library files fail") + } + + if err := a.markSourcesAsSubmited(filteredJobs); err != nil { + a.submit(&entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{WaitForTape: &entity.JobArchiveWaitForTapeParam{}}}) + return nil, err + } + + return tape, nil +} + +func (a *jobArchiveExecutor) switchStep(target entity.JobArchiveStep, status entity.JobStatus, expect mapset.Set[entity.JobArchiveStep]) error { + a.stateLock.Lock() + defer a.stateLock.Unlock() + + if !expect.Contains(a.state.Step) { + return fmt.Errorf("unexpected current step, target= '%s' expect= '%s' has= '%s'", target, expect, a.state.Step) + } + + a.state.Step = target + a.job.Status = status + if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil { + return fmt.Errorf("switch to step copying, save job fail, %w", err) + } + + return nil +} + +func (a *jobArchiveExecutor) markSourcesAsSubmited(jobs []*acp.Job) error { + a.stateLock.Lock() + defer a.stateLock.Unlock() + + searchableSource := a.state.Sources[:] + for _, job := range jobs { + 
src := entity.NewSourceFromACPJob(job) + for idx, testSrc := range searchableSource { + if src.Compare(testSrc.Source) <= 0 { + searchableSource = searchableSource[idx:] + break + } + } + + target := searchableSource[0] + if target == nil || !src.Equal(target.Source) { + continue + } + + target.Status = entity.CopyStatus_Submited + } + + if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil { + return fmt.Errorf("mark sources as submited, save job, %w", err) + } + + atomic.StoreInt64(&a.progress.bytes, 0) + atomic.StoreInt64(&a.progress.files, 0) + atomic.StoreInt64(&a.progress.totalBytes, 0) + atomic.StoreInt64(&a.progress.totalFiles, 0) + return nil +} + +func (a *jobArchiveExecutor) getTodoSources() int { + a.stateLock.Lock() + defer a.stateLock.Unlock() + + var todo int + for _, s := range a.state.Sources { + if s.Status == entity.CopyStatus_Submited { + continue + } + todo++ + } + + return todo +} + +func (a *jobArchiveExecutor) makeTapeFinished() { + if a.getTodoSources() > 0 { + a.submit(&entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{WaitForTape: &entity.JobArchiveWaitForTapeParam{}}}) + } else { + a.submit(&entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_Finished{Finished: &entity.JobArchiveFinishedParam{}}}) + } +} diff --git a/executor/job_archive_param.go b/executor/job_archive_param.go new file mode 100644 index 0000000..09fc212 --- /dev/null +++ b/executor/job_archive_param.go @@ -0,0 +1,74 @@ +package executor + +import ( + "context" + "fmt" + "os" + "sort" + + "github.com/abc950309/acp" + "github.com/abc950309/tapewriter/entity" +) + +func (e *Executor) initArchive(ctx context.Context, job *Job, param *entity.JobParamArchive) error { + var err error + sources := make([]*entity.SourceState, 0, len(param.Sources)*8) + for _, src := range param.Sources { + sources, err = walk(ctx, src, sources) + if err != nil { + return err + } + } + sort.Slice(sources, func(i, j int) bool { + return sources[i].Source.Compare(sources[j].Source) < 0 + }) + + for idx, src := range sources { + if idx > 0 && sources[idx-1].Source.Equal(src.Source) { + return fmt.Errorf("have multi file with same path, path= %s", src.Source.RealPath()) + } + } + + job.State = &entity.JobState{State: &entity.JobState_Archive{Archive: &entity.JobStateArchive{ + Step: entity.JobArchiveStep_Pending, + Sources: sources, + }}} + return nil +} + +func walk(ctx context.Context, src *entity.Source, sources []*entity.SourceState) ([]*entity.SourceState, error) { + path := src.RealPath() + + stat, err := os.Stat(path) + if err != nil { + return nil, fmt.Errorf("walk get stat, path= '%s', %w", path, err) + } + + mode := stat.Mode() + if mode.IsRegular() { + if stat.Name() == ".DS_Store" { + return sources, nil + } + return append(sources, &entity.SourceState{ + Source: src, + Size: stat.Size(), + Status: entity.CopyStatus_Pending, + }), nil + } + if mode&acp.UnexpectFileMode != 0 { + return sources, nil + } + + files, err := os.ReadDir(path) + if err != nil { + return nil, fmt.Errorf("walk read dir, path= '%s', %w", path, err) + } + for _, file := range files { + sources, err = walk(ctx, src.Append(file.Name()), sources) + if err != nil { + return nil, err + } + } + + return sources, nil +} diff --git a/executor/job_archive_start.go b/executor/job_archive_start.go new file mode 100644 index 0000000..b0abce0 --- /dev/null +++ b/executor/job_archive_start.go @@ -0,0 +1,15 @@ +package executor + +import ( + "context" + + "github.com/abc950309/tapewriter/entity" +) + +func (e *Executor) 
startArchive(ctx context.Context, job *Job) error { + return e.Submit(ctx, job, &entity.JobNextParam{Param: &entity.JobNextParam_Archive{ + Archive: &entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{ + WaitForTape: &entity.JobArchiveWaitForTapeParam{}, + }}, + }}) +} diff --git a/executor/job_restore.go b/executor/job_restore.go new file mode 100644 index 0000000..b19f28f --- /dev/null +++ b/executor/job_restore.go @@ -0,0 +1,59 @@ +package executor + +import ( + "context" + "fmt" + "os" + "os/exec" + "time" + + "github.com/abc950309/tapewriter/library" + "github.com/sirupsen/logrus" +) + +func (e *Executor) RestoreLoadTape(ctx context.Context, device string, tape *library.Tape) error { + if !e.occupyDevice(device) { + return fmt.Errorf("device is using, device= %s", device) + } + defer e.releaseDevice(device) + + keyPath, keyRecycle, err := e.restoreKey(tape.Encryption) + if err != nil { + return err + } + defer func() { + time.Sleep(time.Second) + keyRecycle() + }() + + logger := logrus.StandardLogger() + + if err := runCmd(logger, e.makeEncryptCmd(ctx, device, keyPath, tape.Barcode, tape.Name)); err != nil { + return fmt.Errorf("run encrypt script fail, %w", err) + } + + mountPoint, err := os.MkdirTemp("", "*.ltfs") + if err != nil { + return fmt.Errorf("create temp mountpoint, %w", err) + } + + mountCmd := exec.CommandContext(ctx, e.mountScript) + mountCmd.Env = append(mountCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("MOUNT_POINT=%s", mountPoint)) + if err := runCmd(logger, mountCmd); err != nil { + return fmt.Errorf("run mount script fail, %w", err) + } + // defer func() { + // umountCmd := exec.CommandContext(ctx, e.umountScript) + // umountCmd.Env = append(umountCmd.Env, fmt.Sprintf("MOUNT_POINT=%s", mountPoint)) + // if err := runCmd(logger, umountCmd); err != nil { + // logger.WithContext(ctx).WithError(err).Errorf("run umount script fail, %s", mountPoint) + // return + // } + // if err := os.Remove(mountPoint); err != nil { + // logger.WithContext(ctx).WithError(err).Errorf("remove mount point fail, %s", mountPoint) + // return + // } + // }() + + return nil +} diff --git a/executor/key.go b/executor/key.go new file mode 100644 index 0000000..96da631 --- /dev/null +++ b/executor/key.go @@ -0,0 +1,54 @@ +package executor + +import ( + "context" + "crypto/rand" + "encoding/hex" + "fmt" + "os" + "os/exec" + "strings" +) + +const ( + keySize = 256 + keyV1Header = "v1:" +) + +// restoreKey returns (path, recycle, error) +func (e *Executor) restoreKey(str string) (string, func(), error) { + file, err := os.CreateTemp("", "*.key") + if err != nil { + return "", nil, fmt.Errorf("restore key, create temp, %w", err) + } + defer file.Close() + + if strings.HasPrefix(str, keyV1Header) { + if _, err := file.WriteString(str[len(keyV1Header):]); err != nil { + return "", nil, fmt.Errorf("restore key, write key, %w", err) + } + } + + return file.Name(), func() { os.Remove(file.Name()) }, nil +} + +// newKey returns (key, path, recycle, error) +func (e *Executor) newKey() (string, string, func(), error) { + keyBuf := make([]byte, keySize/8) + if _, err := rand.Reader.Read(keyBuf); err != nil { + return "", "", nil, fmt.Errorf("gen key fail, %w", err) + } + key := keyV1Header + hex.EncodeToString(keyBuf) + + path, recycle, err := e.restoreKey(key) + if err != nil { + return "", "", nil, err + } + return key, path, recycle, nil +} + +func (e *Executor) makeEncryptCmd(ctx context.Context, device, keyPath, barcode, name string) *exec.Cmd { + cmd := 
exec.CommandContext(ctx, e.encryptScript) + cmd.Env = append(cmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("KEY_FILE=%s", keyPath), fmt.Sprintf("TAPE_BARCODE=%s", barcode), fmt.Sprintf("TAPE_NAME=%s", name)) + return cmd +} diff --git a/executor/log.go b/executor/log.go new file mode 100644 index 0000000..be98fba --- /dev/null +++ b/executor/log.go @@ -0,0 +1,50 @@ +package executor + +import ( + "errors" + "fmt" + "os" + "os/exec" + "path" + + "github.com/sirupsen/logrus" +) + +func (e *Executor) logPath(jobID int64) (string, string) { + return path.Join(e.workDirectory, "job-logs"), fmt.Sprintf("%d.log", jobID) +} + +func (e *Executor) newLogWriter(jobID int64) (*os.File, error) { + dir, filename := e.logPath(jobID) + if err := os.MkdirAll(dir, 0755); err != nil { + return nil, fmt.Errorf("make job log dir fail, path= '%s', err= %w", dir, err) + } + + file, err := os.OpenFile(path.Join(dir, filename), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + if err != nil { + return nil, fmt.Errorf("create file fail, path= '%s', err= %w", path.Join(dir, filename), err) + } + + return file, nil +} + +func (e *Executor) NewLogReader(jobID int64) (*os.File, error) { + dir, filename := e.logPath(jobID) + file, err := os.OpenFile(path.Join(dir, filename), os.O_RDONLY, 0644) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil, nil + } + return nil, fmt.Errorf("open log file fail, path= '%s', err= %w", path.Join(dir, filename), err) + } + + return file, nil +} + +func runCmd(logger *logrus.Logger, cmd *exec.Cmd) error { + writer := logger.WriterLevel(logrus.InfoLevel) + cmd.Stdout = writer + cmd.Stderr = writer + + return cmd.Run() +} diff --git a/executor/progress.go b/executor/progress.go new file mode 100644 index 0000000..f09f7b3 --- /dev/null +++ b/executor/progress.go @@ -0,0 +1,8 @@ +package executor + +type progress struct { + speed int64 + + totalBytes, totalFiles int64 + bytes, files int64 +} diff --git a/external/external.go b/external/external.go new file mode 100644 index 0000000..d184667 --- /dev/null +++ b/external/external.go @@ -0,0 +1,11 @@ +package external + +import "github.com/abc950309/tapewriter/library" + +type External struct { + lib *library.Library +} + +func New(lib *library.Library) *External { + return &External{lib: lib} +} diff --git a/external/from_json.go b/external/from_json.go new file mode 100644 index 0000000..fee2c63 --- /dev/null +++ b/external/from_json.go @@ -0,0 +1,63 @@ +package external + +import ( + "context" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "path" + + "github.com/abc950309/acp" + "github.com/abc950309/tapewriter/library" +) + +func (e *External) ImportACPReport(ctx context.Context, barname, name, encryption string, reader io.Reader) error { + report := new(acp.Report) + if err := json.NewDecoder(reader).Decode(report); err != nil { + return err + } + + files := make([]*library.TapeFile, 0, 16) + for _, f := range report.Jobs { + if len(f.SuccessTargets) == 0 { + continue + } + if !f.Mode.IsRegular() { + continue + } + + hash, err := hex.DecodeString(f.SHA256) + if err != nil { + return fmt.Errorf("decode sha256 fail, err= %w", err) + } + + files = append(files, &library.TapeFile{ + Path: path.Join(f.Path...), + Size: f.Size, + Mode: f.Mode, + ModTime: f.ModTime, + WriteTime: f.WriteTime, + Hash: hash, + }) + } + + if len(files) == 0 { + return fmt.Errorf("no files found in report") + } + + if _, err := e.lib.CreateTape(ctx, &library.Tape{ + Barcode: barname, + Name: name, + Encryption: encryption, + CreateTime: files[0].WriteTime, + }, files); err != nil { + return 
fmt.Errorf("save tape, err= %w", err) + } + + if err := e.lib.TrimFiles(ctx); err != nil { + return err + } + + return nil +} diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..a547bf3 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,24 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/frontend/.prettierrc.json b/frontend/.prettierrc.json new file mode 100644 index 0000000..65e13e4 --- /dev/null +++ b/frontend/.prettierrc.json @@ -0,0 +1,3 @@ +{ + "printWidth": 160 +} diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..5ead749 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,16 @@ + + + + + + + Tape Writer + + + +
+ + + diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..05d887a --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,46 @@ +{ + "name": "tape-manager", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc && vite build", + "preview": "vite preview", + "gen-proto": "protoc --ts_out ./src/entity --proto_path ../entity/ `ls ../entity/*.proto` && ./src/entity/gen_index.sh" + }, + "dependencies": { + "@emotion/react": "^11.10.4", + "@emotion/styled": "^11.10.4", + "@fortawesome/fontawesome-svg-core": "^1.2.32", + "@fortawesome/free-solid-svg-icons": "^5.13.1", + "@fortawesome/react-fontawesome": "^0.1.12", + "@mui/icons-material": "^5.10.16", + "@mui/material": "^5.10.9", + "@mui/styled-engine": "^5.10.8", + "@protobuf-ts/grpcweb-transport": "^2.8.2", + "@protobuf-ts/runtime": "^2.8.2", + "@protobuf-ts/runtime-rpc": "^2.8.2", + "chonky": "^2.3.2", + "chonky-icon-fontawesome": "^2.3.2", + "fast-text-encoding": "^1.0.6", + "filesize": "^10.0.5", + "moment": "^2.29.4", + "react": "^18.2.0", + "react-dnd": "^11.1.3", + "react-dnd-html5-backend": "^11.1.3", + "react-dom": "^18.2.0", + "react-is": "^18.2.0" + }, + "devDependencies": { + "@protobuf-ts/plugin": "^2.8.2", + "@types/react": "^18.0.17", + "@types/react-dom": "^18.0.6", + "@vitejs/plugin-react": "^2.1.0", + "less": "^4.1.3", + "prettier": "2.7.1", + "tsdef": "^0.0.14", + "typescript": "^4.6.4", + "vite": "^3.1.0" + } +} diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml new file mode 100644 index 0000000..886dbd1 --- /dev/null +++ b/frontend/pnpm-lock.yaml @@ -0,0 +1,2495 @@ +lockfileVersion: 5.4 + +specifiers: + '@emotion/react': ^11.10.4 + '@emotion/styled': ^11.10.4 + '@fortawesome/fontawesome-svg-core': ^1.2.32 + '@fortawesome/free-solid-svg-icons': ^5.13.1 + '@fortawesome/react-fontawesome': ^0.1.12 + '@mui/icons-material': ^5.10.16 + '@mui/material': ^5.10.9 + '@mui/styled-engine': ^5.10.8 + '@protobuf-ts/grpcweb-transport': ^2.8.2 + '@protobuf-ts/plugin': ^2.8.2 + '@protobuf-ts/runtime': ^2.8.2 + '@protobuf-ts/runtime-rpc': ^2.8.2 + '@types/react': ^18.0.17 + '@types/react-dom': ^18.0.6 + '@vitejs/plugin-react': ^2.1.0 + chonky: ^2.3.2 + chonky-icon-fontawesome: ^2.3.2 + fast-text-encoding: ^1.0.6 + filesize: ^10.0.5 + less: ^4.1.3 + moment: ^2.29.4 + prettier: 2.7.1 + react: ^18.2.0 + react-dnd: ^11.1.3 + react-dnd-html5-backend: ^11.1.3 + react-dom: ^18.2.0 + react-is: ^18.2.0 + tsdef: ^0.0.14 + typescript: ^4.6.4 + vite: ^3.1.0 + +dependencies: + '@emotion/react': 11.10.4_iapumuv4e6jcjznwuxpf4tt22e + '@emotion/styled': 11.10.4_g3tud4ene45llglqap74b5kkse + '@fortawesome/fontawesome-svg-core': 1.2.36 + '@fortawesome/free-solid-svg-icons': 5.15.4 + '@fortawesome/react-fontawesome': 0.1.19_itb73sutiv5dicgxanxbjij2uu + '@mui/icons-material': 5.10.16_6usjrp3ypnzobhq35dcwvjrt3m + '@mui/material': 5.10.9_ikcgkdnp4bn3rgptamntbhbo7e + '@mui/styled-engine': 5.10.8_hfzxdiydbrbhhfpkwuv3jhvwmq + '@protobuf-ts/grpcweb-transport': 2.8.2 + '@protobuf-ts/runtime': 2.8.2 + '@protobuf-ts/runtime-rpc': 2.8.2 + chonky: 2.3.2_qge232svojtclvlhf4p5efbcfu + chonky-icon-fontawesome: 2.3.2_qge232svojtclvlhf4p5efbcfu + fast-text-encoding: 1.0.6 + filesize: 10.0.5 + moment: 2.29.4 + react: 18.2.0 + react-dnd: 11.1.3_biqbaboplfbrettd7655fr4n2y + react-dnd-html5-backend: 11.1.3 + react-dom: 18.2.0_react@18.2.0 + react-is: 18.2.0 + +devDependencies: + '@protobuf-ts/plugin': 2.8.2 + '@types/react': 18.0.21 + '@types/react-dom': 18.0.6 + 
'@vitejs/plugin-react': 2.1.0_vite@3.1.8 + less: 4.1.3 + prettier: 2.7.1 + tsdef: 0.0.14 + typescript: 4.8.4 + vite: 3.1.8_less@4.1.3 + +packages: + + /@ampproject/remapping/2.2.0: + resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/gen-mapping': 0.1.1 + '@jridgewell/trace-mapping': 0.3.17 + dev: true + + /@babel/code-frame/7.18.6: + resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.18.6 + + /@babel/compat-data/7.19.4: + resolution: {integrity: sha512-CHIGpJcUQ5lU9KrPHTjBMhVwQG6CQjxfg36fGXl3qk/Gik1WwWachaXFuo0uCWJT/mStOKtcbFJCaVLihC1CMw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/core/7.19.3: + resolution: {integrity: sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.0 + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.19.5 + '@babel/helper-compilation-targets': 7.19.3_@babel+core@7.19.3 + '@babel/helper-module-transforms': 7.19.0 + '@babel/helpers': 7.19.4 + '@babel/parser': 7.19.4 + '@babel/template': 7.18.10 + '@babel/traverse': 7.19.4 + '@babel/types': 7.19.4 + convert-source-map: 1.9.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.1 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/generator/7.19.5: + resolution: {integrity: sha512-DxbNz9Lz4aMZ99qPpO1raTbcrI1ZeYh+9NR9qhfkQIbFtVEqotHojEBxHzmxhVONkGt6VyrqVQcgpefMy9pqcg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + '@jridgewell/gen-mapping': 0.3.2 + jsesc: 2.5.2 + + /@babel/helper-annotate-as-pure/7.18.6: + resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + + /@babel/helper-compilation-targets/7.19.3_@babel+core@7.19.3: + resolution: {integrity: sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/compat-data': 7.19.4 + '@babel/core': 7.19.3 + '@babel/helper-validator-option': 7.18.6 + browserslist: 4.21.4 + semver: 6.3.0 + dev: true + + /@babel/helper-environment-visitor/7.18.9: + resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} + engines: {node: '>=6.9.0'} + + /@babel/helper-function-name/7.19.0: + resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.10 + '@babel/types': 7.19.4 + + /@babel/helper-hoist-variables/7.18.6: + resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + + /@babel/helper-module-imports/7.18.6: + resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + + /@babel/helper-module-transforms/7.19.0: + resolution: {integrity: 
sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-simple-access': 7.19.4 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/helper-validator-identifier': 7.19.1 + '@babel/template': 7.18.10 + '@babel/traverse': 7.19.4 + '@babel/types': 7.19.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-plugin-utils/7.19.0: + resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==} + engines: {node: '>=6.9.0'} + + /@babel/helper-simple-access/7.19.4: + resolution: {integrity: sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + dev: true + + /@babel/helper-split-export-declaration/7.18.6: + resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.19.4 + + /@babel/helper-string-parser/7.19.4: + resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-identifier/7.19.1: + resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} + engines: {node: '>=6.9.0'} + + /@babel/helper-validator-option/7.18.6: + resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helpers/7.19.4: + resolution: {integrity: sha512-G+z3aOx2nfDHwX/kyVii5fJq+bgscg89/dJNWpYeKeBv3v9xX8EIabmx1k6u9LS04H7nROFVRVK+e3k0VHp+sw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.10 + '@babel/traverse': 7.19.4 + '@babel/types': 7.19.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/highlight/7.18.6: + resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.19.1 + chalk: 2.4.2 + js-tokens: 4.0.0 + + /@babel/parser/7.19.4: + resolution: {integrity: sha512-qpVT7gtuOLjWeDTKLkJ6sryqLliBaFpAtGeqw5cs5giLldvh+Ch0plqnUMKoVAUS6ZEueQQiZV+p5pxtPitEsA==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.19.4 + + /@babel/plugin-syntax-jsx/7.18.6: + resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/helper-plugin-utils': 7.19.0 + dev: false + + /@babel/plugin-syntax-jsx/7.18.6_@babel+core@7.19.3: + resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.3 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.19.3: + resolution: {integrity: sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==} + engines: {node: '>=6.9.0'} + peerDependencies: + 
'@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.3 + '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.3 + dev: true + + /@babel/plugin-transform-react-jsx-self/7.18.6_@babel+core@7.19.3: + resolution: {integrity: sha512-A0LQGx4+4Jv7u/tWzoJF7alZwnBDQd6cGLh9P+Ttk4dpiL+J5p7NSNv/9tlEFFJDq3kjxOavWmbm6t0Gk+A3Ig==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.3 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-transform-react-jsx-source/7.18.6_@babel+core@7.19.3: + resolution: {integrity: sha512-utZmlASneDfdaMh0m/WausbjUjEdGrQJz0vFK93d7wD3xf5wBtX219+q6IlCNZeguIcxS2f/CvLZrlLSvSHQXw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.3 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-transform-react-jsx/7.19.0_@babel+core@7.19.3: + resolution: {integrity: sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.3 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-plugin-utils': 7.19.0 + '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.19.3 + '@babel/types': 7.19.4 + dev: true + + /@babel/runtime/7.19.4: + resolution: {integrity: sha512-EXpLCrk55f+cYqmHsSR+yD/0gAIMxxA9QK9lnQWzhMCvt+YmoBN7Zx94s++Kv0+unHk39vxNO8t+CMA2WSS3wA==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: false + + /@babel/runtime/7.20.6: + resolution: {integrity: sha512-Q+8MqP7TiHMWzSfwiJwXCjyf4GYA4Dgw3emg/7xmwsdLJOZUp+nMqcOwOzzYheuM1rhDu8FSj2l0aoMygEuXuA==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: false + + /@babel/template/7.18.10: + resolution: {integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/parser': 7.19.4 + '@babel/types': 7.19.4 + + /@babel/traverse/7.19.4: + resolution: {integrity: sha512-w3K1i+V5u2aJUOXBFFC5pveFLmtq1s3qcdDNC2qRI6WPBQIDaKFqXxDEqDO/h1dQ3HjsZoZMyIy6jGLq0xtw+g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.19.5 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.19.4 + '@babel/types': 7.19.4 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/traverse/7.19.4_supports-color@5.5.0: + resolution: {integrity: sha512-w3K1i+V5u2aJUOXBFFC5pveFLmtq1s3qcdDNC2qRI6WPBQIDaKFqXxDEqDO/h1dQ3HjsZoZMyIy6jGLq0xtw+g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.19.5 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.19.4 + '@babel/types': 7.19.4 + debug: 4.3.4_supports-color@5.5.0 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: false + + /@babel/types/7.19.4: + resolution: {integrity: sha512-M5LK7nAeS6+9j7hAq+b3fQs+pNfUtTGq+yFFfHnauFA8zQtLRfmuipmsKDKKLuyG+wC8ABW43A153YNawNTEtw==} + engines: {node: '>=6.9.0'} + dependencies: + 
'@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + + /@emotion/babel-plugin/11.10.2: + resolution: {integrity: sha512-xNQ57njWTFVfPAc3cjfuaPdsgLp5QOSuRsj9MA6ndEhH/AzuZM86qIQzt6rq+aGBwj3n5/TkLmU5lhAfdRmogA==} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/helper-module-imports': 7.18.6 + '@babel/plugin-syntax-jsx': 7.18.6 + '@babel/runtime': 7.20.6 + '@emotion/hash': 0.9.0 + '@emotion/memoize': 0.8.0 + '@emotion/serialize': 1.1.0 + babel-plugin-macros: 3.1.0 + convert-source-map: 1.9.0 + escape-string-regexp: 4.0.0 + find-root: 1.1.0 + source-map: 0.5.7 + stylis: 4.0.13 + dev: false + + /@emotion/cache/11.10.3: + resolution: {integrity: sha512-Psmp/7ovAa8appWh3g51goxu/z3iVms7JXOreq136D8Bbn6dYraPnmL6mdM8GThEx9vwSn92Fz+mGSjBzN8UPQ==} + dependencies: + '@emotion/memoize': 0.8.0 + '@emotion/sheet': 1.2.0 + '@emotion/utils': 1.2.0 + '@emotion/weak-memoize': 0.3.0 + stylis: 4.0.13 + dev: false + + /@emotion/hash/0.8.0: + resolution: {integrity: sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==} + dev: false + + /@emotion/hash/0.9.0: + resolution: {integrity: sha512-14FtKiHhy2QoPIzdTcvh//8OyBlknNs2nXRwIhG904opCby3l+9Xaf/wuPvICBF0rc1ZCNBd3nKe9cd2mecVkQ==} + dev: false + + /@emotion/is-prop-valid/0.7.3: + resolution: {integrity: sha512-uxJqm/sqwXw3YPA5GXX365OBcJGFtxUVkB6WyezqFHlNe9jqUWH5ur2O2M8dGBz61kn1g3ZBlzUunFQXQIClhA==} + dependencies: + '@emotion/memoize': 0.7.1 + dev: false + + /@emotion/is-prop-valid/1.2.0: + resolution: {integrity: sha512-3aDpDprjM0AwaxGE09bOPkNxHpBd+kA6jty3RnaEXdweX1DF1U3VQpPYb0g1IStAuK7SVQ1cy+bNBBKp4W3Fjg==} + dependencies: + '@emotion/memoize': 0.8.0 + dev: false + + /@emotion/memoize/0.7.1: + resolution: {integrity: sha512-Qv4LTqO11jepd5Qmlp3M1YEjBumoTHcHFdgPTQ+sFlIL5myi/7xu/POwP7IRu6odBdmLXdtIs1D6TuW6kbwbbg==} + dev: false + + /@emotion/memoize/0.8.0: + resolution: {integrity: sha512-G/YwXTkv7Den9mXDO7AhLWkE3q+I92B+VqAE+dYG4NGPaHZGvt3G8Q0p9vmE+sq7rTGphUbAvmQ9YpbfMQGGlA==} + dev: false + + /@emotion/react/11.10.4_iapumuv4e6jcjznwuxpf4tt22e: + resolution: {integrity: sha512-j0AkMpr6BL8gldJZ6XQsQ8DnS9TxEQu1R+OGmDZiWjBAJtCcbt0tS3I/YffoqHXxH6MjgI7KdMbYKw3MEiU9eA==} + peerDependencies: + '@babel/core': ^7.0.0 + '@types/react': '*' + react: '>=16.8.0' + peerDependenciesMeta: + '@babel/core': + optional: true + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.19.4 + '@emotion/babel-plugin': 11.10.2 + '@emotion/cache': 11.10.3 + '@emotion/serialize': 1.1.0 + '@emotion/use-insertion-effect-with-fallbacks': 1.0.0_react@18.2.0 + '@emotion/utils': 1.2.0 + '@emotion/weak-memoize': 0.3.0 + '@types/react': 18.0.21 + hoist-non-react-statics: 3.3.2 + react: 18.2.0 + dev: false + + /@emotion/serialize/1.1.0: + resolution: {integrity: sha512-F1ZZZW51T/fx+wKbVlwsfchr5q97iW8brAnXmsskz4d0hVB4O3M/SiA3SaeH06x02lSNzkkQv+n3AX3kCXKSFA==} + dependencies: + '@emotion/hash': 0.9.0 + '@emotion/memoize': 0.8.0 + '@emotion/unitless': 0.8.0 + '@emotion/utils': 1.2.0 + csstype: 3.1.1 + dev: false + + /@emotion/sheet/1.2.0: + resolution: {integrity: sha512-OiTkRgpxescko+M51tZsMq7Puu/KP55wMT8BgpcXVG2hqXc0Vo0mfymJ/Uj24Hp0i083ji/o0aLddh08UEjq8w==} + dev: false + + /@emotion/styled/11.10.4_g3tud4ene45llglqap74b5kkse: + resolution: {integrity: sha512-pRl4R8Ez3UXvOPfc2bzIoV8u9P97UedgHS4FPX594ntwEuAMA114wlaHvOK24HB48uqfXiGlYIZYCxVJ1R1ttQ==} + peerDependencies: + '@babel/core': ^7.0.0 + '@emotion/react': ^11.0.0-rc.0 + '@types/react': '*' + react: '>=16.8.0' + 
peerDependenciesMeta: + '@babel/core': + optional: true + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.19.4 + '@emotion/babel-plugin': 11.10.2 + '@emotion/is-prop-valid': 1.2.0 + '@emotion/react': 11.10.4_iapumuv4e6jcjznwuxpf4tt22e + '@emotion/serialize': 1.1.0 + '@emotion/use-insertion-effect-with-fallbacks': 1.0.0_react@18.2.0 + '@emotion/utils': 1.2.0 + '@types/react': 18.0.21 + react: 18.2.0 + dev: false + + /@emotion/stylis/0.8.5: + resolution: {integrity: sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==} + dev: false + + /@emotion/unitless/0.7.5: + resolution: {integrity: sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==} + dev: false + + /@emotion/unitless/0.8.0: + resolution: {integrity: sha512-VINS5vEYAscRl2ZUDiT3uMPlrFQupiKgHz5AA4bCH1miKBg4qtwkim1qPmJj/4WG6TreYMY111rEFsjupcOKHw==} + dev: false + + /@emotion/use-insertion-effect-with-fallbacks/1.0.0_react@18.2.0: + resolution: {integrity: sha512-1eEgUGmkaljiBnRMTdksDV1W4kUnmwgp7X9G8B++9GYwl1lUdqSndSriIrTJ0N7LQaoauY9JJ2yhiOYK5+NI4A==} + peerDependencies: + react: '>=16.8.0' + dependencies: + react: 18.2.0 + dev: false + + /@emotion/utils/1.2.0: + resolution: {integrity: sha512-sn3WH53Kzpw8oQ5mgMmIzzyAaH2ZqFEbozVVBSYp538E06OSE6ytOp7pRAjNQR+Q/orwqdQYJSe2m3hCOeznkw==} + dev: false + + /@emotion/weak-memoize/0.3.0: + resolution: {integrity: sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg==} + dev: false + + /@esbuild/android-arm/0.15.11: + resolution: {integrity: sha512-PzMcQLazLBkwDEkrNPi9AbjFt6+3I7HKbiYF2XtWQ7wItrHvEOeO3T8Am434zAozWtVP7lrTue1bEfc2nYWeCA==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-loong64/0.15.11: + resolution: {integrity: sha512-geWp637tUhNmhL3Xgy4Bj703yXB9dqiLJe05lCUfjSFDrQf9C/8pArusyPUbUbPwlC/EAUjBw32sxuIl/11dZw==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@formatjs/ecma402-abstract/1.11.4: + resolution: {integrity: sha512-EBikYFp2JCdIfGEb5G9dyCkTGDmC57KSHhRQOC3aYxoPWVZvfWCDjZwkGYHN7Lis/fmuWl906bnNTJifDQ3sXw==} + dependencies: + '@formatjs/intl-localematcher': 0.2.25 + tslib: 2.4.0 + dev: false + + /@formatjs/fast-memoize/1.2.1: + resolution: {integrity: sha512-Rg0e76nomkz3vF9IPlKeV+Qynok0r7YZjL6syLz4/urSg0IbjPZCB/iYUMNsYA643gh4mgrX3T7KEIFIxJBQeg==} + dependencies: + tslib: 2.4.0 + dev: false + + /@formatjs/icu-messageformat-parser/2.1.0: + resolution: {integrity: sha512-Qxv/lmCN6hKpBSss2uQ8IROVnta2r9jd3ymUEIjm2UyIkUCHVcbUVRGL/KS/wv7876edvsPe+hjHVJ4z8YuVaw==} + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/icu-skeleton-parser': 1.3.6 + tslib: 2.4.0 + dev: false + + /@formatjs/icu-skeleton-parser/1.3.6: + resolution: {integrity: sha512-I96mOxvml/YLrwU2Txnd4klA7V8fRhb6JG/4hm3VMNmeJo1F03IpV2L3wWt7EweqNLES59SZ4d6hVOPCSf80Bg==} + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + tslib: 2.4.0 + dev: false + + /@formatjs/intl-displaynames/5.4.3: + resolution: {integrity: sha512-4r12A3mS5dp5hnSaQCWBuBNfi9Amgx2dzhU4lTFfhSxgb5DOAiAbMpg6+7gpWZgl4ahsj3l2r/iHIjdmdXOE2Q==} + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/intl-localematcher': 0.2.25 + tslib: 2.4.0 + dev: false + + /@formatjs/intl-listformat/6.5.3: + resolution: {integrity: sha512-ozpz515F/+3CU+HnLi5DYPsLa6JoCfBggBSSg/8nOB5LYSFW9+ZgNQJxJ8tdhKYeODT+4qVHX27EeJLoxLGLNg==} + dependencies: + 
'@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/intl-localematcher': 0.2.25 + tslib: 2.4.0 + dev: false + + /@formatjs/intl-localematcher/0.2.25: + resolution: {integrity: sha512-YmLcX70BxoSopLFdLr1Ds99NdlTI2oWoLbaUW2M406lxOIPzE1KQhRz2fPUkq34xVZQaihCoU29h0KK7An3bhA==} + dependencies: + tslib: 2.4.0 + dev: false + + /@formatjs/intl/2.2.1_typescript@4.8.4: + resolution: {integrity: sha512-vgvyUOOrzqVaOFYzTf2d3+ToSkH2JpR7x/4U1RyoHQLmvEaTQvXJ7A2qm1Iy3brGNXC/+/7bUlc3lpH+h/LOJA==} + peerDependencies: + typescript: ^4.5 + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/fast-memoize': 1.2.1 + '@formatjs/icu-messageformat-parser': 2.1.0 + '@formatjs/intl-displaynames': 5.4.3 + '@formatjs/intl-listformat': 6.5.3 + intl-messageformat: 9.13.0 + tslib: 2.4.0 + typescript: 4.8.4 + dev: false + + /@fortawesome/fontawesome-common-types/0.2.36: + resolution: {integrity: sha512-a/7BiSgobHAgBWeN7N0w+lAhInrGxksn13uK7231n2m8EDPE3BMCl9NZLTGrj9ZXfCmC6LM0QLqXidIizVQ6yg==} + engines: {node: '>=6'} + requiresBuild: true + dev: false + + /@fortawesome/fontawesome-svg-core/1.2.36: + resolution: {integrity: sha512-YUcsLQKYb6DmaJjIHdDWpBIGCcyE/W+p/LMGvjQem55Mm2XWVAP5kWTMKWLv9lwpCVjpLxPyOMOyUocP1GxrtA==} + engines: {node: '>=6'} + requiresBuild: true + dependencies: + '@fortawesome/fontawesome-common-types': 0.2.36 + dev: false + + /@fortawesome/free-brands-svg-icons/5.13.1: + resolution: {integrity: sha512-dKwF+NpIV2LVCNBA7hibH53k+ChF4Wu59P2z35gu3zwRBZpmpLVhS9k1/RiSqUqkyXUQvA2rSv48GY6wp5axZQ==} + engines: {node: '>=6'} + dependencies: + '@fortawesome/fontawesome-common-types': 0.2.36 + dev: false + + /@fortawesome/free-solid-svg-icons/5.15.4: + resolution: {integrity: sha512-JLmQfz6tdtwxoihXLg6lT78BorrFyCf59SAwBM6qV/0zXyVeDygJVb3fk+j5Qat+Yvcxp1buLTY5iDh1ZSAQ8w==} + engines: {node: '>=6'} + requiresBuild: true + dependencies: + '@fortawesome/fontawesome-common-types': 0.2.36 + dev: false + + /@fortawesome/react-fontawesome/0.1.19_itb73sutiv5dicgxanxbjij2uu: + resolution: {integrity: sha512-Hyb+lB8T18cvLNX0S3llz7PcSOAJMLwiVKBuuzwM/nI5uoBw+gQjnf9il0fR1C3DKOI5Kc79pkJ4/xB0Uw9aFQ==} + peerDependencies: + '@fortawesome/fontawesome-svg-core': ~1 || ~6 + react: '>=16.x' + dependencies: + '@fortawesome/fontawesome-svg-core': 1.2.36 + prop-types: 15.8.1 + react: 18.2.0 + dev: false + + /@jridgewell/gen-mapping/0.1.1: + resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@jridgewell/gen-mapping/0.3.2: + resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + '@jridgewell/trace-mapping': 0.3.17 + + /@jridgewell/resolve-uri/3.1.0: + resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + + /@jridgewell/set-array/1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + + /@jridgewell/sourcemap-codec/1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + + /@jridgewell/trace-mapping/0.3.17: + resolution: 
{integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + + /@material-ui/core/4.11.3_latq7dd42slydnarmq2qltz6ai: + resolution: {integrity: sha512-Adt40rGW6Uds+cAyk3pVgcErpzU/qxc7KBR94jFHBYretU4AtWZltYcNsbeMn9tXL86jjVL1kuGcIHsgLgFGRw==} + engines: {node: '>=8.0.0'} + deprecated: 'You can now upgrade to @mui/material. See the guide: https://mui.com/guides/migration-v4/' + peerDependencies: + '@types/react': ^16.8.6 || ^17.0.0 + react: ^16.8.0 || ^17.0.0 + react-dom: ^16.8.0 || ^17.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@material-ui/styles': 4.11.5_latq7dd42slydnarmq2qltz6ai + '@material-ui/system': 4.12.2_latq7dd42slydnarmq2qltz6ai + '@material-ui/types': 5.1.0_@types+react@17.0.50 + '@material-ui/utils': 4.11.3_biqbaboplfbrettd7655fr4n2y + '@types/react': 17.0.50 + '@types/react-transition-group': 4.4.5 + clsx: 1.2.1 + hoist-non-react-statics: 3.3.2 + popper.js: 1.16.1-lts + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 16.13.1 + react-transition-group: 4.4.5_biqbaboplfbrettd7655fr4n2y + dev: false + + /@material-ui/styles/4.11.5_latq7dd42slydnarmq2qltz6ai: + resolution: {integrity: sha512-o/41ot5JJiUsIETME9wVLAJrmIWL3j0R0Bj2kCOLbSfqEkKf0fmaPt+5vtblUh5eXr2S+J/8J3DaCb10+CzPGA==} + engines: {node: '>=8.0.0'} + peerDependencies: + '@types/react': ^16.8.6 || ^17.0.0 + react: ^16.8.0 || ^17.0.0 + react-dom: ^16.8.0 || ^17.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@emotion/hash': 0.8.0 + '@material-ui/types': 5.1.0_@types+react@17.0.50 + '@material-ui/utils': 4.11.3_biqbaboplfbrettd7655fr4n2y + '@types/react': 17.0.50 + clsx: 1.2.1 + csstype: 2.6.21 + hoist-non-react-statics: 3.3.2 + jss: 10.9.2 + jss-plugin-camel-case: 10.9.2 + jss-plugin-default-unit: 10.9.2 + jss-plugin-global: 10.9.2 + jss-plugin-nested: 10.9.2 + jss-plugin-props-sort: 10.9.2 + jss-plugin-rule-value-function: 10.9.2 + jss-plugin-vendor-prefixer: 10.9.2 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /@material-ui/system/4.12.2_latq7dd42slydnarmq2qltz6ai: + resolution: {integrity: sha512-6CSKu2MtmiJgcCGf6nBQpM8fLkuB9F55EKfbdTC80NND5wpTmKzwdhLYLH3zL4cLlK0gVaaltW7/wMuyTnN0Lw==} + engines: {node: '>=8.0.0'} + peerDependencies: + '@types/react': ^16.8.6 || ^17.0.0 + react: ^16.8.0 || ^17.0.0 + react-dom: ^16.8.0 || ^17.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@material-ui/utils': 4.11.3_biqbaboplfbrettd7655fr4n2y + '@types/react': 17.0.50 + csstype: 2.6.21 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /@material-ui/types/5.1.0_@types+react@17.0.50: + resolution: {integrity: sha512-7cqRjrY50b8QzRSYyhSpx4WRw2YuO0KKIGQEVk5J8uoz2BanawykgZGoWEqKm7pVIbzFDN0SpPcVV4IhOFkl8A==} + peerDependencies: + '@types/react': '*' + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 17.0.50 + dev: false + + /@material-ui/utils/4.11.3_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-ZuQPV4rBK/V1j2dIkSSEcH5uT6AaHuKWFfotADHsC0wVL1NLd2WkFCm4ZZbX33iO4ydl6V0GPngKm8HZQ2oujg==} + engines: {node: '>=8.0.0'} + peerDependencies: + react: ^16.8.0 || ^17.0.0 + react-dom: ^16.8.0 || ^17.0.0 + dependencies: + '@babel/runtime': 7.20.6 + prop-types: 15.8.1 + 
react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 17.0.2 + dev: false + + /@mui/base/5.0.0-alpha.101_rj7ozvcq3uehdlnj3cbwzbi5ce: + resolution: {integrity: sha512-a54BcXvArGOKUZ2zyS/7B9GNhAGgfomEQSkfEZ88Nc9jKvXA+Mppenfz5o4JCAnD8c4VlePmz9rKOYvvum1bZw==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 + react-dom: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@emotion/is-prop-valid': 1.2.0 + '@mui/types': 7.2.0_@types+react@18.0.21 + '@mui/utils': 5.10.9_react@18.2.0 + '@popperjs/core': 2.11.6 + '@types/react': 18.0.21 + clsx: 1.2.1 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 18.2.0 + dev: false + + /@mui/core-downloads-tracker/5.10.9: + resolution: {integrity: sha512-rqoFu4qww6KJBbXYhyRd9YXjwBHa3ylnBPSWbGf1bdfG0AYMKmVzg8zxkWvxAWOp97kvx3M2kNPb0xMIDZiogQ==} + dev: false + + /@mui/icons-material/5.10.16_6usjrp3ypnzobhq35dcwvjrt3m: + resolution: {integrity: sha512-jjCc0IF6iyLiucQCu5igg3fOscSqbbvRCmyRxXgzOcLR56B0sg2L8o+ZfJ0dAg59+wvgtXaxvjze/mJg0B4iWA==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@mui/material': ^5.0.0 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@mui/material': 5.10.9_ikcgkdnp4bn3rgptamntbhbo7e + '@types/react': 18.0.21 + react: 18.2.0 + dev: false + + /@mui/material/5.10.9_ikcgkdnp4bn3rgptamntbhbo7e: + resolution: {integrity: sha512-sdOzlgpCmyw48je+E7o9UGGJpgBaF+60FlTRpVpcd/z+LUhnuzzuis891yPI5dPPXLBDL/bO4SsGg51lgNeLBw==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@emotion/react': ^11.5.0 + '@emotion/styled': ^11.3.0 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 + react-dom: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@emotion/react': + optional: true + '@emotion/styled': + optional: true + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.19.4 + '@emotion/react': 11.10.4_iapumuv4e6jcjznwuxpf4tt22e + '@emotion/styled': 11.10.4_g3tud4ene45llglqap74b5kkse + '@mui/base': 5.0.0-alpha.101_rj7ozvcq3uehdlnj3cbwzbi5ce + '@mui/core-downloads-tracker': 5.10.9 + '@mui/system': 5.10.9_h33l6npc22g7vcra72cibfsrvm + '@mui/types': 7.2.0_@types+react@18.0.21 + '@mui/utils': 5.10.9_react@18.2.0 + '@types/react': 18.0.21 + '@types/react-transition-group': 4.4.5 + clsx: 1.2.1 + csstype: 3.1.1 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 18.2.0 + react-transition-group: 4.4.5_biqbaboplfbrettd7655fr4n2y + dev: false + + /@mui/private-theming/5.10.9_iapumuv4e6jcjznwuxpf4tt22e: + resolution: {integrity: sha512-BN7/CnsVPVyBaQpDTij4uV2xGYHHHhOgpdxeYLlIu+TqnsVM7wUeF+37kXvHovxM6xmL5qoaVUD98gDC0IZnHg==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@mui/utils': 5.10.9_react@18.2.0 + '@types/react': 18.0.21 + prop-types: 15.8.1 + react: 18.2.0 + dev: false + + /@mui/styled-engine/5.10.8_hfzxdiydbrbhhfpkwuv3jhvwmq: + resolution: {integrity: sha512-w+y8WI18EJV6zM/q41ug19cE70JTeO6sWFsQ7tgePQFpy6ToCVPh0YLrtqxUZXSoMStW5FMw0t9fHTFAqPbngw==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@emotion/react': ^11.4.1 + '@emotion/styled': ^11.3.0 + react: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@emotion/react': + optional: true + 
'@emotion/styled': + optional: true + dependencies: + '@babel/runtime': 7.19.4 + '@emotion/cache': 11.10.3 + '@emotion/react': 11.10.4_iapumuv4e6jcjznwuxpf4tt22e + '@emotion/styled': 11.10.4_g3tud4ene45llglqap74b5kkse + csstype: 3.1.1 + prop-types: 15.8.1 + react: 18.2.0 + dev: false + + /@mui/system/5.10.9_h33l6npc22g7vcra72cibfsrvm: + resolution: {integrity: sha512-B6fFC0sK06hNmqY7fAUfwShQv594+u/DT1YEFHPtK4laouTu7V4vSGQWi1WJT9Bjs9Db5D1bRDJ+Yy+tc3QOYA==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@emotion/react': ^11.5.0 + '@emotion/styled': ^11.3.0 + '@types/react': ^17.0.0 || ^18.0.0 + react: ^17.0.0 || ^18.0.0 + peerDependenciesMeta: + '@emotion/react': + optional: true + '@emotion/styled': + optional: true + '@types/react': + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@emotion/react': 11.10.4_iapumuv4e6jcjznwuxpf4tt22e + '@emotion/styled': 11.10.4_g3tud4ene45llglqap74b5kkse + '@mui/private-theming': 5.10.9_iapumuv4e6jcjznwuxpf4tt22e + '@mui/styled-engine': 5.10.8_hfzxdiydbrbhhfpkwuv3jhvwmq + '@mui/types': 7.2.0_@types+react@18.0.21 + '@mui/utils': 5.10.9_react@18.2.0 + '@types/react': 18.0.21 + clsx: 1.2.1 + csstype: 3.1.1 + prop-types: 15.8.1 + react: 18.2.0 + dev: false + + /@mui/types/7.2.0_@types+react@18.0.21: + resolution: {integrity: sha512-lGXtFKe5lp3UxTBGqKI1l7G8sE2xBik8qCfrLHD5olwP/YU0/ReWoWT7Lp1//ri32dK39oPMrJN8TgbkCSbsNA==} + peerDependencies: + '@types/react': '*' + peerDependenciesMeta: + '@types/react': + optional: true + dependencies: + '@types/react': 18.0.21 + dev: false + + /@mui/utils/5.10.9_react@18.2.0: + resolution: {integrity: sha512-2tdHWrq3+WCy+G6TIIaFx3cg7PorXZ71P375ExuX61od1NOAJP1mK90VxQ8N4aqnj2vmO3AQDkV4oV2Ktvt4bA==} + engines: {node: '>=12.0.0'} + peerDependencies: + react: ^17.0.0 || ^18.0.0 + dependencies: + '@babel/runtime': 7.20.6 + '@types/prop-types': 15.7.5 + '@types/react-is': 17.0.3 + prop-types: 15.8.1 + react: 18.2.0 + react-is: 18.2.0 + dev: false + + /@popperjs/core/2.11.6: + resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} + dev: false + + /@protobuf-ts/grpcweb-transport/2.8.2: + resolution: {integrity: sha512-VG7132dOcH7t4C95EUiirzV2RTolBxIBWGxPFmnbKwHuR1lcSWDUbZZ1FIC1C/qip5jK4qcw5R76YZfV7gf8Pw==} + dependencies: + '@protobuf-ts/runtime': 2.8.2 + '@protobuf-ts/runtime-rpc': 2.8.2 + dev: false + + /@protobuf-ts/plugin-framework/2.8.2: + resolution: {integrity: sha512-ivcJdNVB3Iee8044f8erZGBgmB6ZfQbbKyxRgDBXRVKYxsruLr432WcT5upw9autK9OnlSVLaebi8kDneFXd2g==} + dependencies: + '@protobuf-ts/runtime': 2.8.2 + typescript: 3.9.10 + dev: true + + /@protobuf-ts/plugin/2.8.2: + resolution: {integrity: sha512-rTPxaeKBfUar8ubKxbVdv4XL6AcGA0OOgHNHFyrfODP7Epy80omwuvgFJex1YpeNFJxm/FZXXj5Z+nHuhYEqJg==} + hasBin: true + dependencies: + '@protobuf-ts/plugin-framework': 2.8.2 + '@protobuf-ts/protoc': 2.8.2 + '@protobuf-ts/runtime': 2.8.2 + '@protobuf-ts/runtime-rpc': 2.8.2 + typescript: 3.9.10 + dev: true + + /@protobuf-ts/protoc/2.8.2: + resolution: {integrity: sha512-1e+rOgp22ElyqRWunSc8bhatJcvRe90AGPceVn67IFYzybvfKl17vP1igHddeYkN0dzOucnOrwqn2v1jnDfE2w==} + hasBin: true + dev: true + + /@protobuf-ts/runtime-rpc/2.8.2: + resolution: {integrity: sha512-vum/Y7AXdUTWGFu7dke/jCSB9dV3Oo3iVPcce3j7KudpzzWarDkEGvXjKv3Y8zJPj5waToyxwBNSb7eo5Vw5WA==} + dependencies: + '@protobuf-ts/runtime': 2.8.2 + + /@protobuf-ts/runtime/2.8.2: + resolution: {integrity: sha512-PVxsH81y9kEbHldxxG/8Y3z2mTXWQytRl8zNS0mTPUjkEC+8GUX6gj6LsA8EFp25fAs9V0ruh+aNWmPccEI9MA==} + + 
/@react-dnd/asap/4.0.1: + resolution: {integrity: sha512-kLy0PJDDwvwwTXxqTFNAAllPHD73AycE9ypWeln/IguoGBEbvFcPDbCV03G52bEcC5E+YgupBE0VzHGdC8SIXg==} + dev: false + + /@react-dnd/invariant/2.0.0: + resolution: {integrity: sha512-xL4RCQBCBDJ+GRwKTFhGUW8GXa4yoDfJrPbLblc3U09ciS+9ZJXJ3Qrcs/x2IODOdIE5kQxvMmE2UKyqUictUw==} + dev: false + + /@react-dnd/shallowequal/2.0.0: + resolution: {integrity: sha512-Pc/AFTdwZwEKJxFJvlxrSmGe/di+aAOBn60sremrpLo6VI/6cmiUYNNwlI5KNYttg7uypzA3ILPMPgxB2GYZEg==} + dev: false + + /@reduxjs/toolkit/1.8.6_7ozmmenbreubqajvpxb2pmuuau: + resolution: {integrity: sha512-4Ia/Loc6WLmdSOzi7k5ff7dLK8CgG2b8aqpLsCAJhazAzGdp//YBUSaj0ceW6a3kDBDNRrq5CRwyCS0wBiL1ig==} + peerDependencies: + react: ^16.9.0 || ^17.0.0 || ^18 + react-redux: ^7.2.1 || ^8.0.2 + peerDependenciesMeta: + react: + optional: true + react-redux: + optional: true + dependencies: + immer: 9.0.15 + react: 18.2.0 + react-redux: 7.2.9_biqbaboplfbrettd7655fr4n2y + redux: 4.2.0 + redux-thunk: 2.4.1_redux@4.2.0 + reselect: 4.1.6 + dev: false + + /@types/classnames/2.3.1: + resolution: {integrity: sha512-zeOWb0JGBoVmlQoznvqXbE0tEC/HONsnoUNH19Hc96NFsTAwTXbTqb8FMYkru1F/iqp7a18Ws3nWJvtA1sHD1A==} + deprecated: This is a stub types definition. classnames provides its own type definitions, so you do not need this installed. + dependencies: + classnames: 2.3.2 + dev: false + + /@types/fuzzy-search/2.1.2: + resolution: {integrity: sha512-YOqA50Z3xcycm4Br5+MBUpSumfdOAcv34A8A8yFn62zBQPTzJSXQk11qYE5w8BWQ0KrVThXUgEQh7ZLrYI1NaQ==} + dev: false + + /@types/hoist-non-react-statics/3.3.1: + resolution: {integrity: sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==} + dependencies: + '@types/react': 18.0.21 + hoist-non-react-statics: 3.3.2 + dev: false + + /@types/memoizee/0.4.8: + resolution: {integrity: sha512-qDpXKGgwKywnQt/64fH1O0LiPA++QGIYeykEUiZ51HymKVRLnUSGcRuF60IfpPeeXiuRwiR/W4y7S5VzbrgLCA==} + dev: false + + /@types/parse-json/4.0.0: + resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} + dev: false + + /@types/prop-types/15.7.5: + resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==} + + /@types/react-dom/18.0.6: + resolution: {integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==} + dependencies: + '@types/react': 18.0.21 + dev: true + + /@types/react-is/17.0.3: + resolution: {integrity: sha512-aBTIWg1emtu95bLTLx0cpkxwGW3ueZv71nE2YFBpL8k/z5czEW8yYpOo8Dp+UUAFAtKwNaOsh/ioSeQnWlZcfw==} + dependencies: + '@types/react': 18.0.21 + dev: false + + /@types/react-redux/7.1.24: + resolution: {integrity: sha512-7FkurKcS1k0FHZEtdbbgN8Oc6b+stGSfZYjQGicofJ0j4U0qIn/jaSvnP2pLwZKiai3/17xqqxkkrxTgN8UNbQ==} + dependencies: + '@types/hoist-non-react-statics': 3.3.1 + '@types/react': 18.0.21 + hoist-non-react-statics: 3.3.2 + redux: 4.2.0 + dev: false + + /@types/react-transition-group/4.4.5: + resolution: {integrity: sha512-juKD/eiSM3/xZYzjuzH6ZwpP+/lejltmiS3QEzV/vmb/Q8+HfDmxu+Baga8UEMGBqV88Nbg4l2hY/K2DkyaLLA==} + dependencies: + '@types/react': 18.0.21 + dev: false + + /@types/react-virtualized-auto-sizer/1.0.1: + resolution: {integrity: sha512-GH8sAnBEM5GV9LTeiz56r4ZhMOUSrP43tAQNSRVxNexDjcNKLCEtnxusAItg1owFUFE6k0NslV26gqVClVvong==} + dependencies: + '@types/react': 18.0.21 + dev: false + + /@types/react-window/1.8.5: + resolution: {integrity: 
sha512-V9q3CvhC9Jk9bWBOysPGaWy/Z0lxYcTXLtLipkt2cnRj1JOSFNF7wqGpkScSXMgBwC+fnVRg/7shwgddBG5ICw==} + dependencies: + '@types/react': 18.0.21 + dev: false + + /@types/react/17.0.50: + resolution: {integrity: sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==} + dependencies: + '@types/prop-types': 15.7.5 + '@types/scheduler': 0.16.2 + csstype: 3.1.1 + dev: false + + /@types/react/18.0.21: + resolution: {integrity: sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA==} + dependencies: + '@types/prop-types': 15.7.5 + '@types/scheduler': 0.16.2 + csstype: 3.1.1 + + /@types/redux-watch/1.1.0: + resolution: {integrity: sha512-bTeEMsDaGd2JwZP4r2pkElBndC76TGeCcZG3sbeSt28P4GCTmZdjk898+0z3aePYt63lUluPRbGegDiN++Hihg==} + dev: false + + /@types/scheduler/0.16.2: + resolution: {integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==} + + /@types/shortid/0.0.29: + resolution: {integrity: sha512-9BCYD9btg2CY4kPcpMQ+vCR8U6V8f/KvixYD5ZbxoWlkhddNF5IeZMVL3p+QFUkg+Hb+kPAG9Jgk4bnnF1v/Fw==} + dev: false + + /@vitejs/plugin-react/2.1.0_vite@3.1.8: + resolution: {integrity: sha512-am6rPyyU3LzUYne3Gd9oj9c4Rzbq5hQnuGXSMT6Gujq45Il/+bunwq3lrB7wghLkiF45ygMwft37vgJ/NE8IAA==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + vite: ^3.0.0 + dependencies: + '@babel/core': 7.19.3 + '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.3 + '@babel/plugin-transform-react-jsx-development': 7.18.6_@babel+core@7.19.3 + '@babel/plugin-transform-react-jsx-self': 7.18.6_@babel+core@7.19.3 + '@babel/plugin-transform-react-jsx-source': 7.18.6_@babel+core@7.19.3 + magic-string: 0.26.7 + react-refresh: 0.14.0 + vite: 3.1.8_less@4.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /ansi-styles/3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + + /babel-plugin-macros/3.1.0: + resolution: {integrity: sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==} + engines: {node: '>=10', npm: '>=6'} + dependencies: + '@babel/runtime': 7.20.6 + cosmiconfig: 7.0.1 + resolve: 1.22.1 + dev: false + + /babel-plugin-styled-components/2.0.7_styled-components@5.3.6: + resolution: {integrity: sha512-i7YhvPgVqRKfoQ66toiZ06jPNA3p6ierpfUuEWxNF+fV27Uv5gxBkf8KZLHUCc1nFA9j6+80pYoIpqCeyW3/bA==} + peerDependencies: + styled-components: '>= 2' + dependencies: + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + babel-plugin-syntax-jsx: 6.18.0 + lodash: 4.17.21 + picomatch: 2.3.1 + styled-components: 5.3.6_7i5myeigehqah43i5u7wbekgba + dev: false + + /babel-plugin-syntax-jsx/6.18.0: + resolution: {integrity: sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==} + dev: false + + /browserslist/4.21.4: + resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001420 + electron-to-chromium: 1.4.283 + node-releases: 2.0.6 + update-browserslist-db: 1.0.10_browserslist@4.21.4 + dev: true + + /callsites/3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + 
dev: false + + /camelize/1.0.1: + resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==} + dev: false + + /caniuse-lite/1.0.30001420: + resolution: {integrity: sha512-OnyeJ9ascFA9roEj72ok2Ikp7PHJTKubtEJIQ/VK3fdsS50q4KWy+Z5X0A1/GswEItKX0ctAp8n4SYDE7wTu6A==} + dev: true + + /chalk/2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + + /chonky-icon-fontawesome/2.3.2_qge232svojtclvlhf4p5efbcfu: + resolution: {integrity: sha512-19Duy25JxteIlQJfZKLH7ZSujU1zYBMLKovWDOLlw1EdPlXb9exsfYUUkwfNS+31KpwttfaqnGuN8Gyed/R6UQ==} + engines: {node: '>=10'} + peerDependencies: + react: '>=16' + dependencies: + '@fortawesome/fontawesome-svg-core': 1.2.36 + '@fortawesome/free-brands-svg-icons': 5.13.1 + '@fortawesome/free-solid-svg-icons': 5.15.4 + '@fortawesome/react-fontawesome': 0.1.19_itb73sutiv5dicgxanxbjij2uu + chonky: 2.3.2_qge232svojtclvlhf4p5efbcfu + react: 18.2.0 + transitivePeerDependencies: + - react-dom + - react-is + - react-native + - typescript + dev: false + + /chonky/2.3.2_qge232svojtclvlhf4p5efbcfu: + resolution: {integrity: sha512-ed2u+SEjEPSn8bv/zC0sXfMG/XS6Ydm4J2leLCvRb7a/2BZxKqE1DFETxqfLeJ2OA1IRujvwnuvmJO8ZgyOGyA==} + engines: {node: '>=10'} + peerDependencies: + react: '>=16' + dependencies: + '@material-ui/core': 4.11.3_latq7dd42slydnarmq2qltz6ai + '@reduxjs/toolkit': 1.8.6_7ozmmenbreubqajvpxb2pmuuau + '@types/classnames': 2.3.1 + '@types/fuzzy-search': 2.1.2 + '@types/memoizee': 0.4.8 + '@types/react': 17.0.50 + '@types/react-redux': 7.1.24 + '@types/react-virtualized-auto-sizer': 1.0.1 + '@types/react-window': 1.8.5 + '@types/redux-watch': 1.1.0 + '@types/shortid': 0.0.29 + classnames: 2.3.2 + deepmerge: 4.2.2 + exact-trie: 1.0.13 + fast-sort: 2.2.0 + filesize: 6.4.0 + fuzzy-search: 3.2.1 + hotkeys-js: 3.10.0 + react: 18.2.0 + react-dnd: 11.1.3_biqbaboplfbrettd7655fr4n2y + react-dnd-html5-backend: 11.1.3 + react-intl: 5.25.1_uz2ogxeagc4rbieizlfzyaazee + react-jss: 10.9.2_react@18.2.0 + react-redux: 7.2.9_biqbaboplfbrettd7655fr4n2y + react-virtualized-auto-sizer: 1.0.7_biqbaboplfbrettd7655fr4n2y + react-window: 1.8.7_biqbaboplfbrettd7655fr4n2y + redux-watch: 1.2.0 + shortid: 2.2.16 + styled-components: 5.3.6_7i5myeigehqah43i5u7wbekgba + tsdef: 0.0.14 + transitivePeerDependencies: + - react-dom + - react-is + - react-native + - typescript + dev: false + + /classnames/2.3.2: + resolution: {integrity: sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==} + dev: false + + /clsx/1.2.1: + resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} + engines: {node: '>=6'} + dev: false + + /color-convert/1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + + /color-name/1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + + /convert-source-map/1.9.0: + resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} + + /copy-anything/2.0.6: + resolution: {integrity: sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==} + dependencies: + is-what: 
3.14.1 + dev: true + + /cosmiconfig/7.0.1: + resolution: {integrity: sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==} + engines: {node: '>=10'} + dependencies: + '@types/parse-json': 4.0.0 + import-fresh: 3.3.0 + parse-json: 5.2.0 + path-type: 4.0.0 + yaml: 1.10.2 + dev: false + + /css-color-keywords/1.0.0: + resolution: {integrity: sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==} + engines: {node: '>=4'} + dev: false + + /css-jss/10.9.2: + resolution: {integrity: sha512-85P3X4lr2MkPBRkk/cn5AQTy5WHyHOBtQPnWRSYx6F3m0O4pVKexpFKdCDDoAgihp6cwYCf38eGVwU/ssZDSjA==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + jss-preset-default: 10.9.2 + dev: false + + /css-to-react-native/3.0.0: + resolution: {integrity: sha512-Ro1yETZA813eoyUp2GDBhG2j+YggidUmzO1/v9eYBKR2EHVEniE2MI/NqpTQ954BMpTPZFsGNPm46qFB9dpaPQ==} + dependencies: + camelize: 1.0.1 + css-color-keywords: 1.0.0 + postcss-value-parser: 4.2.0 + dev: false + + /css-vendor/2.0.8: + resolution: {integrity: sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ==} + dependencies: + '@babel/runtime': 7.20.6 + is-in-browser: 1.1.3 + dev: false + + /csstype/2.6.21: + resolution: {integrity: sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w==} + dev: false + + /csstype/3.1.1: + resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==} + + /debug/3.2.7: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + dev: true + optional: true + + /debug/4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: true + + /debug/4.3.4_supports-color@5.5.0: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + supports-color: 5.5.0 + dev: false + + /deepmerge/4.2.2: + resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} + engines: {node: '>=0.10.0'} + dev: false + + /dnd-core/11.1.3: + resolution: {integrity: sha512-QugF55dNW+h+vzxVJ/LSJeTeUw9MCJ2cllhmVThVPEtF16ooBkxj0WBE5RB+AceFxMFo1rO6bJKXtqKl+JNnyA==} + dependencies: + '@react-dnd/asap': 4.0.1 + '@react-dnd/invariant': 2.0.0 + redux: 4.2.0 + dev: false + + /dom-helpers/5.2.1: + resolution: {integrity: sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==} + dependencies: + '@babel/runtime': 7.20.6 + csstype: 3.1.1 + dev: false + + /electron-to-chromium/1.4.283: + resolution: {integrity: sha512-g6RQ9zCOV+U5QVHW9OpFR7rdk/V7xfopNXnyAamdpFgCHgZ1sjI8VuR1+zG2YG/TZk+tQ8mpNkug4P8FU0fuOA==} + dev: true + + /errno/0.1.8: + resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==} + hasBin: true + requiresBuild: true + dependencies: + prr: 1.0.1 + dev: true 
+ optional: true + + /error-ex/1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + dependencies: + is-arrayish: 0.2.1 + dev: false + + /esbuild-android-64/0.15.11: + resolution: {integrity: sha512-rrwoXEiuI1kaw4k475NJpexs8GfJqQUKcD08VR8sKHmuW9RUuTR2VxcupVvHdiGh9ihxL9m3lpqB1kju92Ialw==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-android-arm64/0.15.11: + resolution: {integrity: sha512-/hDubOg7BHOhUUsT8KUIU7GfZm5bihqssvqK5PfO4apag7YuObZRZSzViyEKcFn2tPeHx7RKbSBXvAopSHDZJQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-64/0.15.11: + resolution: {integrity: sha512-1DqHD0ms3AhiwkKnjRUzmiW7JnaJJr5FKrPiR7xuyMwnjDqvNWDdMq4rKSD9OC0piFNK6n0LghsglNMe2MwJtA==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-arm64/0.15.11: + resolution: {integrity: sha512-OMzhxSbS0lwwrW40HHjRCeVIJTURdXFA8c3GU30MlHKuPCcvWNUIKVucVBtNpJySXmbkQMDJdJNrXzNDyvoqvQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-64/0.15.11: + resolution: {integrity: sha512-8dKP26r0/Qyez8nTCwpq60QbuYKOeBygdgOAWGCRalunyeqWRoSZj9TQjPDnTTI9joxd3QYw3UhVZTKxO9QdRg==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-arm64/0.15.11: + resolution: {integrity: sha512-aSGiODiukLGGnSg/O9+cGO2QxEacrdCtCawehkWYTt5VX1ni2b9KoxpHCT9h9Y6wGqNHmXFnB47RRJ8BIqZgmQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-32/0.15.11: + resolution: {integrity: sha512-lsrAfdyJBGx+6aHIQmgqUonEzKYeBnyfJPkT6N2dOf1RoXYYV1BkWB6G02tjsrz1d5wZzaTc3cF+TKmuTo/ZwA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-64/0.15.11: + resolution: {integrity: sha512-Y2Rh+PcyVhQqXKBTacPCltINN3uIw2xC+dsvLANJ1SpK5NJUtxv8+rqWpjmBgaNWKQT1/uGpMmA9olALy9PLVA==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm/0.15.11: + resolution: {integrity: sha512-TJllTVk5aSyqPFvvcHTvf6Wu1ZKhWpJ/qNmZO8LL/XeB+LXCclm7HQHNEIz6MT7IX8PmlC1BZYrOiw2sXSB95A==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm64/0.15.11: + resolution: {integrity: sha512-uhcXiTwTmD4OpxJu3xC5TzAAw6Wzf9O1XGWL448EE9bqGjgV1j+oK3lIHAfsHnuIn8K4nDW8yjX0Sv5S++oRuw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-mips64le/0.15.11: + resolution: {integrity: sha512-WD61y/R1M4BLe4gxXRypoQ0Ci+Vjf714QYzcPNkiYv5I8K8WDz2ZR8Bm6cqKxd6rD+e/rZgPDbhQ9PCf7TMHmA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-ppc64le/0.15.11: + resolution: {integrity: sha512-JVleZS9oPVLTlBhPTWgOwxFWU/wMUdlBwTbGA4GF8c38sLbS13cupj+C8bLq929jU7EMWry4SaL+tKGIaTlqKg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-riscv64/0.15.11: + resolution: {integrity: sha512-9aLIalZ2HFHIOZpmVU11sEAS9F8TnHw49daEjcgMpBXHFF57VuT9f9/9LKJhw781Gda0P9jDkuCWJ0tFbErvJw==} + engines: {node: '>=12'} + 
cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-s390x/0.15.11: + resolution: {integrity: sha512-sZHtiXXOKsLI3XGBGoYO4qKBzJlb8xNsWmvFiwFMHFzA4AXgDP1KDp7Dawe9C2pavTRBDvl+Ok4n/DHQ59oaTg==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-netbsd-64/0.15.11: + resolution: {integrity: sha512-hUC9yN06K9sg7ju4Vgu9ChAPdsEgtcrcLfyNT5IKwKyfpLvKUwCMZSdF+gRD3WpyZelgTQfJ+pDx5XFbXTlB0A==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-openbsd-64/0.15.11: + resolution: {integrity: sha512-0bBo9SQR4t66Wd91LGMAqmWorzO0TTzVjYiifwoFtel8luFeXuPThQnEm5ztN4g0fnvcp7AnUPPzS/Depf17wQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-sunos-64/0.15.11: + resolution: {integrity: sha512-EuBdTGlsMTjEl1sQnBX2jfygy7iR6CKfvOzi+gEOfhDqbHXsmY1dcpbVtcwHAg9/2yUZSfMJHMAgf1z8M4yyyw==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-32/0.15.11: + resolution: {integrity: sha512-O0/Wo1Wk6dc0rZSxkvGpmTNIycEznHmkObTFz2VHBhjPsO4ZpCgfGxNkCpz4AdAIeMczpTXt/8d5vdJNKEGC+Q==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-64/0.15.11: + resolution: {integrity: sha512-x977Q4HhNjnHx00b4XLAnTtj5vfbdEvkxaQwC1Zh5AN8g5EX+izgZ6e5QgqJgpzyRNJqh4hkgIJF1pyy1be0mQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-arm64/0.15.11: + resolution: {integrity: sha512-VwUHFACuBahrvntdcMKZteUZ9HaYrBRODoKe4tIWxguQRvvYoYb7iu5LrcRS/FQx8KPZNaa72zuqwVtHeXsITw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild/0.15.11: + resolution: {integrity: sha512-OgHGuhlfZ//mToxjte1D5iiiQgWfJ2GByVMwEC/IuoXsBGkuyK1+KrjYu0laSpnN/L1UmLUCv0s25vObdc1bVg==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + optionalDependencies: + '@esbuild/android-arm': 0.15.11 + '@esbuild/linux-loong64': 0.15.11 + esbuild-android-64: 0.15.11 + esbuild-android-arm64: 0.15.11 + esbuild-darwin-64: 0.15.11 + esbuild-darwin-arm64: 0.15.11 + esbuild-freebsd-64: 0.15.11 + esbuild-freebsd-arm64: 0.15.11 + esbuild-linux-32: 0.15.11 + esbuild-linux-64: 0.15.11 + esbuild-linux-arm: 0.15.11 + esbuild-linux-arm64: 0.15.11 + esbuild-linux-mips64le: 0.15.11 + esbuild-linux-ppc64le: 0.15.11 + esbuild-linux-riscv64: 0.15.11 + esbuild-linux-s390x: 0.15.11 + esbuild-netbsd-64: 0.15.11 + esbuild-openbsd-64: 0.15.11 + esbuild-sunos-64: 0.15.11 + esbuild-windows-32: 0.15.11 + esbuild-windows-64: 0.15.11 + esbuild-windows-arm64: 0.15.11 + dev: true + + /escalade/3.1.1: + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + dev: true + + /escape-string-regexp/1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + + /escape-string-regexp/4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + dev: false + + /exact-trie/1.0.13: + resolution: {integrity: sha512-2N0sx9jMlzZxRmSOpFKmcuaPcLXYLGRp69DohigW5E7R/uo9i6S1zJ/PuAckf70099am1ts7YBRMLO8Nr8AJLg==} + dev: 
false + + /fast-sort/2.2.0: + resolution: {integrity: sha512-W7zqnn2zsYoQA87FKmYtgOsbJohOrh7XrtZrCVHN5XZKqTBTv5UG+rSS3+iWbg/nepRQUOu+wnas8BwtK8kiCg==} + dev: false + + /fast-text-encoding/1.0.6: + resolution: {integrity: sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==} + dev: false + + /filesize/10.0.5: + resolution: {integrity: sha512-qrzyt8gLh86nsyYiC3ibI5KyIYRCWg2yqIklYrWF4a0qNfekik4OQfn7AoPJG2hRrPMSlH6fET4VEITweZAzjA==} + engines: {node: '>= 14.0.0'} + dev: false + + /filesize/6.4.0: + resolution: {integrity: sha512-mjFIpOHC4jbfcTfoh4rkWpI31mF7viw9ikj/JyLoKzqlwG/YsefKfvYlYhdYdg/9mtK2z1AzgN/0LvVQ3zdlSQ==} + engines: {node: '>= 0.4.0'} + dev: false + + /find-root/1.1.0: + resolution: {integrity: sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==} + dev: false + + /fsevents/2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /function-bind/1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + + /fuzzy-search/3.2.1: + resolution: {integrity: sha512-vAcPiyomt1ioKAsAL2uxSABHJ4Ju/e4UeDM+g1OlR0vV4YhLGMNsdLNvZTpEDY4JCSt0E4hASCNM5t2ETtsbyg==} + dev: false + + /gensync/1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + dev: true + + /globals/11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + + /graceful-fs/4.2.10: + resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + requiresBuild: true + dev: true + optional: true + + /has-flag/3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + + /has/1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + + /hoist-non-react-statics/3.3.2: + resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} + dependencies: + react-is: 16.13.1 + dev: false + + /hotkeys-js/3.10.0: + resolution: {integrity: sha512-20xeVdOqcgTkMox0+BqFwADZP7+5dy/9CFPpAinSMh2d0s3b0Hs2V2D+lMh4Hphkf7VE9pwnOl58eP1te+REcg==} + dev: false + + /hyphenate-style-name/1.0.4: + resolution: {integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==} + dev: false + + /iconv-lite/0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: true + optional: true + + /image-size/0.5.5: + resolution: {integrity: sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==} + engines: {node: '>=0.10.0'} + hasBin: true + requiresBuild: true + dev: true + optional: true + + /immer/9.0.15: + resolution: {integrity: sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ==} + dev: false + + 
/import-fresh/3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + dev: false + + /intl-messageformat/9.13.0: + resolution: {integrity: sha512-7sGC7QnSQGa5LZP7bXLDhVDtQOeKGeBFGHF2Y8LVBwYZoQZCgWeKoPGTa5GMG8g/TzDgeXuYJQis7Ggiw2xTOw==} + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/fast-memoize': 1.2.1 + '@formatjs/icu-messageformat-parser': 2.1.0 + tslib: 2.4.0 + dev: false + + /is-arrayish/0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + dev: false + + /is-core-module/2.10.0: + resolution: {integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==} + dependencies: + has: 1.0.3 + + /is-in-browser/1.1.3: + resolution: {integrity: sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g==} + dev: false + + /is-what/3.14.1: + resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==} + dev: true + + /js-tokens/4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + /jsesc/2.5.2: + resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} + engines: {node: '>=4'} + hasBin: true + + /json-parse-even-better-errors/2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + dev: false + + /json5/2.2.1: + resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} + engines: {node: '>=6'} + hasBin: true + dev: true + + /jss-plugin-camel-case/10.9.2: + resolution: {integrity: sha512-wgBPlL3WS0WDJ1lPJcgjux/SHnDuu7opmgQKSraKs4z8dCCyYMx9IDPFKBXQ8Q5dVYij1FFV0WdxyhuOOAXuTg==} + dependencies: + '@babel/runtime': 7.20.6 + hyphenate-style-name: 1.0.4 + jss: 10.9.2 + dev: false + + /jss-plugin-compose/10.9.2: + resolution: {integrity: sha512-XvsGar4D791VgOMqbEk1XYY5s84Ew57OMLYjbnTXl3MSaBTvmR5M3dTqQbBquJGp4pLyyqlfTcijfolZII1w4Q==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + tiny-warning: 1.0.3 + dev: false + + /jss-plugin-default-unit/10.9.2: + resolution: {integrity: sha512-pYg0QX3bBEFtTnmeSI3l7ad1vtHU42YEEpgW7pmIh+9pkWNWb5dwS/4onSfAaI0kq+dOZHzz4dWe+8vWnanoSg==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + dev: false + + /jss-plugin-expand/10.9.2: + resolution: {integrity: sha512-D3PGLUJu3YbHhX6vANooCa1gqfv68wLssDp08wH21YVTCt6u8jWfqkoj9NmPz5ea1Fc0WMoJtofJTlgb6ApZvw==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + dev: false + + /jss-plugin-extend/10.9.2: + resolution: {integrity: sha512-55lXOQ7yyRtrMRYOcWUeZ3Ea4/Cd/oTndvb9j/5O3+E816nRoYbDsqKI8ob0Yx8PnfHBsQN0u10JcLnqq+8aPA==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + tiny-warning: 1.0.3 + dev: false + + /jss-plugin-global/10.9.2: + resolution: {integrity: sha512-GcX0aE8Ef6AtlasVrafg1DItlL/tWHoC4cGir4r3gegbWwF5ZOBYhx04gurPvWHC8F873aEGqge7C17xpwmp2g==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + dev: false + + /jss-plugin-nested/10.9.2: + resolution: {integrity: sha512-VgiOWIC6bvgDaAL97XCxGD0BxOKM0K0zeB/ECyNaVF6FqvdGB9KBBWRdy2STYAss4VVA7i5TbxFZN+WSX1kfQA==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 
10.9.2 + tiny-warning: 1.0.3 + dev: false + + /jss-plugin-props-sort/10.9.2: + resolution: {integrity: sha512-AP1AyUTbi2szylgr+O0OB7gkIxEGzySLITZ2GpsaoX72YMCGI2jYAc+WUhPfvUnZYiauF4zTnN4V4TGuvFjJlw==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + dev: false + + /jss-plugin-rule-value-function/10.9.2: + resolution: {integrity: sha512-vf5ms8zvLFMub6swbNxvzsurHfUZ5Shy5aJB2gIpY6WNA3uLinEcxYyraQXItRHi5ivXGqYciFDRM2ZoVoRZ4Q==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + tiny-warning: 1.0.3 + dev: false + + /jss-plugin-rule-value-observable/10.9.2: + resolution: {integrity: sha512-SSqBD4s/k2mwsOUg6+LI/oEmvUxokIWi+5bZOyer/2nP4kMxeo3gHURc2yiAEu3v62Sf3GHhgC6pBAugxhLL9A==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + symbol-observable: 1.2.0 + dev: false + + /jss-plugin-template/10.9.2: + resolution: {integrity: sha512-f+ANQg8n+tkYea4Fu1Qt4skv9UJiNllzJB1Ga0QyumDZZJVpCzl8/mbFT1cvQSW7h+yB31GexFYVF3ct5fFGzA==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + tiny-warning: 1.0.3 + dev: false + + /jss-plugin-vendor-prefixer/10.9.2: + resolution: {integrity: sha512-SxcEoH+Rttf9fEv6KkiPzLdXRmI6waOTcMkbbEFgdZLDYNIP9UKNHFy6thhbRKqv0XMQZdrEsbDyV464zE/dUA==} + dependencies: + '@babel/runtime': 7.20.6 + css-vendor: 2.0.8 + jss: 10.9.2 + dev: false + + /jss-preset-default/10.9.2: + resolution: {integrity: sha512-HMkTLgYPRjwdiu8n8ZB2VEfpO95cZ5AmmSYvFSvt76MQJMp/xDh6cP49MmZHwMpIvXwNj0LI8Peglgwz+rHKBQ==} + dependencies: + '@babel/runtime': 7.20.6 + jss: 10.9.2 + jss-plugin-camel-case: 10.9.2 + jss-plugin-compose: 10.9.2 + jss-plugin-default-unit: 10.9.2 + jss-plugin-expand: 10.9.2 + jss-plugin-extend: 10.9.2 + jss-plugin-global: 10.9.2 + jss-plugin-nested: 10.9.2 + jss-plugin-props-sort: 10.9.2 + jss-plugin-rule-value-function: 10.9.2 + jss-plugin-rule-value-observable: 10.9.2 + jss-plugin-template: 10.9.2 + jss-plugin-vendor-prefixer: 10.9.2 + dev: false + + /jss/10.9.2: + resolution: {integrity: sha512-b8G6rWpYLR4teTUbGd4I4EsnWjg7MN0Q5bSsjKhVkJVjhQDy2KzkbD2AW3TuT0RYZVmZZHKIrXDn6kjU14qkUg==} + dependencies: + '@babel/runtime': 7.20.6 + csstype: 3.1.1 + is-in-browser: 1.1.3 + tiny-warning: 1.0.3 + dev: false + + /less/4.1.3: + resolution: {integrity: sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA==} + engines: {node: '>=6'} + hasBin: true + dependencies: + copy-anything: 2.0.6 + parse-node-version: 1.0.1 + tslib: 2.4.0 + optionalDependencies: + errno: 0.1.8 + graceful-fs: 4.2.10 + image-size: 0.5.5 + make-dir: 2.1.0 + mime: 1.6.0 + needle: 3.1.0 + source-map: 0.6.1 + transitivePeerDependencies: + - supports-color + dev: true + + /lines-and-columns/1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + dev: false + + /lodash/4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + dev: false + + /loose-envify/1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + dependencies: + js-tokens: 4.0.0 + dev: false + + /magic-string/0.26.7: + resolution: {integrity: sha512-hX9XH3ziStPoPhJxLq1syWuZMxbDvGNbVchfrdCtanC7D13888bMFow61x8axrx+GfHLtVeAx2kxL7tTGRl+Ow==} + engines: {node: '>=12'} + dependencies: + sourcemap-codec: 1.4.8 + dev: true + + /make-dir/2.1.0: + resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} + 
engines: {node: '>=6'} + requiresBuild: true + dependencies: + pify: 4.0.1 + semver: 5.7.1 + dev: true + optional: true + + /memoize-one/5.2.1: + resolution: {integrity: sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==} + dev: false + + /mime/1.6.0: + resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} + hasBin: true + requiresBuild: true + dev: true + optional: true + + /moment/2.29.4: + resolution: {integrity: sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==} + dev: false + + /ms/2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + + /ms/2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + dev: true + optional: true + + /nanoid/2.1.11: + resolution: {integrity: sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA==} + dev: false + + /nanoid/3.3.4: + resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + dev: true + + /needle/3.1.0: + resolution: {integrity: sha512-gCE9weDhjVGCRqS8dwDR/D3GTAeyXLXuqp7I8EzH6DllZGXSUyxuqqLh+YX9rMAWaaTFyVAg6rHGL25dqvczKw==} + engines: {node: '>= 4.4.x'} + hasBin: true + requiresBuild: true + dependencies: + debug: 3.2.7 + iconv-lite: 0.6.3 + sax: 1.2.4 + transitivePeerDependencies: + - supports-color + dev: true + optional: true + + /node-releases/2.0.6: + resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} + dev: true + + /object-assign/4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + dev: false + + /object-path/0.11.8: + resolution: {integrity: sha512-YJjNZrlXJFM42wTBn6zgOJVar9KFJvzx6sTWDte8sWZF//cnjl0BxHNpfZx+ZffXX63A9q0b1zsFiBX4g4X5KA==} + engines: {node: '>= 10.12.0'} + dev: false + + /parent-module/1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + dependencies: + callsites: 3.1.0 + dev: false + + /parse-json/5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + dependencies: + '@babel/code-frame': 7.18.6 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + dev: false + + /parse-node-version/1.0.1: + resolution: {integrity: sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==} + engines: {node: '>= 0.10'} + dev: true + + /path-parse/1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + /path-type/4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + dev: false + + /picocolors/1.0.0: + resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + dev: true + + /picomatch/2.3.1: + resolution: {integrity: 
sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: false + + /pify/4.0.1: + resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} + engines: {node: '>=6'} + dev: true + optional: true + + /popper.js/1.16.1-lts: + resolution: {integrity: sha512-Kjw8nKRl1m+VrSFCoVGPph93W/qrSO7ZkqPpTf7F4bk/sqcfWK019dWBUpE/fBOsOQY1dks/Bmcbfn1heM/IsA==} + dev: false + + /postcss-value-parser/4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + dev: false + + /postcss/8.4.18: + resolution: {integrity: sha512-Wi8mWhncLJm11GATDaQKobXSNEYGUHeQLiQqDFG1qQ5UTDPTEvKw0Xt5NsTpktGTwLps3ByrWsBrG0rB8YQ9oA==} + engines: {node: ^10 || ^12 || >=14} + dependencies: + nanoid: 3.3.4 + picocolors: 1.0.0 + source-map-js: 1.0.2 + dev: true + + /prettier/2.7.1: + resolution: {integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==} + engines: {node: '>=10.13.0'} + hasBin: true + dev: true + + /prop-types/15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + dev: false + + /prr/1.0.1: + resolution: {integrity: sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==} + dev: true + optional: true + + /react-display-name/0.2.5: + resolution: {integrity: sha512-I+vcaK9t4+kypiSgaiVWAipqHRXYmZIuAiS8vzFvXHHXVigg/sMKwlRgLy6LH2i3rmP+0Vzfl5lFsFRwF1r3pg==} + dev: false + + /react-dnd-html5-backend/11.1.3: + resolution: {integrity: sha512-/1FjNlJbW/ivkUxlxQd7o3trA5DE33QiRZgxent3zKme8DwF4Nbw3OFVhTRFGaYhHFNL1rZt6Rdj1D78BjnNLw==} + dependencies: + dnd-core: 11.1.3 + dev: false + + /react-dnd/11.1.3_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-8rtzzT8iwHgdSC89VktwhqdKKtfXaAyC4wiqp0SywpHG12TTLvfOoL6xNEIUWXwIEWu+CFfDn4GZJyynCEuHIQ==} + peerDependencies: + react: '>= 16.9.0' + react-dom: '>= 16.9.0' + dependencies: + '@react-dnd/shallowequal': 2.0.0 + '@types/hoist-non-react-statics': 3.3.1 + dnd-core: 11.1.3 + hoist-non-react-statics: 3.3.2 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /react-dom/18.2.0_react@18.2.0: + resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==} + peerDependencies: + react: ^18.2.0 + dependencies: + loose-envify: 1.4.0 + react: 18.2.0 + scheduler: 0.23.0 + dev: false + + /react-intl/5.25.1_uz2ogxeagc4rbieizlfzyaazee: + resolution: {integrity: sha512-pkjdQDvpJROoXLMltkP/5mZb0/XqrqLoPGKUCfbdkP8m6U9xbK40K51Wu+a4aQqTEvEK5lHBk0fWzUV72SJ3Hg==} + peerDependencies: + react: ^16.3.0 || 17 || 18 + typescript: ^4.5 + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@formatjs/ecma402-abstract': 1.11.4 + '@formatjs/icu-messageformat-parser': 2.1.0 + '@formatjs/intl': 2.2.1_typescript@4.8.4 + '@formatjs/intl-displaynames': 5.4.3 + '@formatjs/intl-listformat': 6.5.3 + '@types/hoist-non-react-statics': 3.3.1 + '@types/react': 18.0.21 + hoist-non-react-statics: 3.3.2 + intl-messageformat: 9.13.0 + react: 18.2.0 + tslib: 2.4.0 + typescript: 4.8.4 + dev: false + + /react-is/16.13.1: + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + dev: false + + /react-is/17.0.2: + resolution: 
{integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} + dev: false + + /react-is/18.2.0: + resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} + dev: false + + /react-jss/10.9.2_react@18.2.0: + resolution: {integrity: sha512-f7azcJ3GhFXa3vNVF9IMN5ja/u1rEmyRhUiPa9KRdnnZ4wdN8xfCs5LShanRBslLXlD+OFi1Zzz66PwXSkd41w==} + peerDependencies: + react: '>=16.8.6' + dependencies: + '@babel/runtime': 7.20.6 + '@emotion/is-prop-valid': 0.7.3 + css-jss: 10.9.2 + hoist-non-react-statics: 3.3.2 + is-in-browser: 1.1.3 + jss: 10.9.2 + jss-preset-default: 10.9.2 + prop-types: 15.8.1 + react: 18.2.0 + shallow-equal: 1.2.1 + theming: 3.3.0_react@18.2.0 + tiny-warning: 1.0.3 + dev: false + + /react-redux/7.2.9_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-Gx4L3uM182jEEayZfRbI/G11ZpYdNAnBs70lFVMNdHJI76XYtR+7m0MN+eAs7UHBPhWXcnFPaS+9owSCJQHNpQ==} + peerDependencies: + react: ^16.8.3 || ^17 || ^18 + react-dom: '*' + react-native: '*' + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true + dependencies: + '@babel/runtime': 7.20.6 + '@types/react-redux': 7.1.24 + hoist-non-react-statics: 3.3.2 + loose-envify: 1.4.0 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 17.0.2 + dev: false + + /react-refresh/0.14.0: + resolution: {integrity: sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==} + engines: {node: '>=0.10.0'} + dev: true + + /react-transition-group/4.4.5_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} + peerDependencies: + react: '>=16.6.0' + react-dom: '>=16.6.0' + dependencies: + '@babel/runtime': 7.20.6 + dom-helpers: 5.2.1 + loose-envify: 1.4.0 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /react-virtualized-auto-sizer/1.0.7_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-Mxi6lwOmjwIjC1X4gABXMJcKHsOo0xWl3E3ugOgufB8GJU+MqrtY35aBuvCYv/razQ1Vbp7h1gWJjGjoNN5pmA==} + engines: {node: '>8.0.0'} + peerDependencies: + react: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0-rc + react-dom: ^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0-rc + dependencies: + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /react-window/1.8.7_biqbaboplfbrettd7655fr4n2y: + resolution: {integrity: sha512-JHEZbPXBpKMmoNO1bNhoXOOLg/ujhL/BU4IqVU9r8eQPcy5KQnGHIHDRkJ0ns9IM5+Aq5LNwt3j8t3tIrePQzA==} + engines: {node: '>8.0.0'} + peerDependencies: + react: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 + react-dom: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 + dependencies: + '@babel/runtime': 7.20.6 + memoize-one: 5.2.1 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + dev: false + + /react/18.2.0: + resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} + engines: {node: '>=0.10.0'} + dependencies: + loose-envify: 1.4.0 + dev: false + + /redux-thunk/2.4.1_redux@4.2.0: + resolution: {integrity: sha512-OOYGNY5Jy2TWvTL1KgAlVy6dcx3siPJ1wTq741EPyUKfn6W6nChdICjZwCd0p8AZBs5kWpZlbkXW2nE/zjUa+Q==} + peerDependencies: + redux: ^4 + dependencies: + redux: 4.2.0 + dev: false + + /redux-watch/1.2.0: + resolution: {integrity: sha512-Ws4Q+e5zFGMyy1H709c1Ws8apSd6MqoJRIzBDHbI4nikome/IZWVTYXdQNz+VJxPjyX/h2E+lYEo41fXgjCF8g==} + dependencies: + object-path: 0.11.8 + dev: false + + /redux/4.2.0: 
+ resolution: {integrity: sha512-oSBmcKKIuIR4ME29/AeNUnl5L+hvBq7OaJWzaptTQJAntaPvxIJqfnjbaEiCzzaIz+XmVILfqAM3Ob0aXLPfjA==} + dependencies: + '@babel/runtime': 7.20.6 + dev: false + + /regenerator-runtime/0.13.11: + resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} + dev: false + + /reselect/4.1.6: + resolution: {integrity: sha512-ZovIuXqto7elwnxyXbBtCPo9YFEr3uJqj2rRbcOOog1bmu2Ag85M4hixSwFWyaBMKXNgvPaJ9OSu9SkBPIeJHQ==} + dev: false + + /resolve-from/4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + dev: false + + /resolve/1.22.1: + resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} + hasBin: true + dependencies: + is-core-module: 2.10.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + /rollup/2.78.1: + resolution: {integrity: sha512-VeeCgtGi4P+o9hIg+xz4qQpRl6R401LWEXBmxYKOV4zlF82lyhgh2hTZnheFUbANE8l2A41F458iwj2vEYaXJg==} + engines: {node: '>=10.0.0'} + hasBin: true + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /safer-buffer/2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + dev: true + optional: true + + /sax/1.2.4: + resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} + dev: true + optional: true + + /scheduler/0.23.0: + resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==} + dependencies: + loose-envify: 1.4.0 + dev: false + + /semver/5.7.1: + resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} + hasBin: true + dev: true + optional: true + + /semver/6.3.0: + resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} + hasBin: true + dev: true + + /shallow-equal/1.2.1: + resolution: {integrity: sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==} + dev: false + + /shallowequal/1.1.0: + resolution: {integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==} + dev: false + + /shortid/2.2.16: + resolution: {integrity: sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g==} + dependencies: + nanoid: 2.1.11 + dev: false + + /source-map-js/1.0.2: + resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} + engines: {node: '>=0.10.0'} + dev: true + + /source-map/0.5.7: + resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==} + engines: {node: '>=0.10.0'} + dev: false + + /source-map/0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + requiresBuild: true + dev: true + optional: true + + /sourcemap-codec/1.4.8: + resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} + dev: true + + /styled-components/5.3.6_7i5myeigehqah43i5u7wbekgba: + resolution: {integrity: sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==} 
+ engines: {node: '>=10'} + requiresBuild: true + peerDependencies: + react: '>= 16.8.0' + react-dom: '>= 16.8.0' + react-is: '>= 16.8.0' + dependencies: + '@babel/helper-module-imports': 7.18.6 + '@babel/traverse': 7.19.4_supports-color@5.5.0 + '@emotion/is-prop-valid': 1.2.0 + '@emotion/stylis': 0.8.5 + '@emotion/unitless': 0.7.5 + babel-plugin-styled-components: 2.0.7_styled-components@5.3.6 + css-to-react-native: 3.0.0 + hoist-non-react-statics: 3.3.2 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + react-is: 18.2.0 + shallowequal: 1.1.0 + supports-color: 5.5.0 + dev: false + + /stylis/4.0.13: + resolution: {integrity: sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag==} + dev: false + + /supports-color/5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} + dependencies: + has-flag: 3.0.0 + + /supports-preserve-symlinks-flag/1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + /symbol-observable/1.2.0: + resolution: {integrity: sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==} + engines: {node: '>=0.10.0'} + dev: false + + /theming/3.3.0_react@18.2.0: + resolution: {integrity: sha512-u6l4qTJRDaWZsqa8JugaNt7Xd8PPl9+gonZaIe28vAhqgHMIG/DOyFPqiKN/gQLQYj05tHv+YQdNILL4zoiAVA==} + engines: {node: '>=8'} + peerDependencies: + react: '>=16.3' + dependencies: + hoist-non-react-statics: 3.3.2 + prop-types: 15.8.1 + react: 18.2.0 + react-display-name: 0.2.5 + tiny-warning: 1.0.3 + dev: false + + /tiny-warning/1.0.3: + resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==} + dev: false + + /to-fast-properties/2.0.0: + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + + /tsdef/0.0.14: + resolution: {integrity: sha512-UjMD4XKRWWFlFBfwKVQmGFT5YzW/ZaF8x6KpCDf92u9wgKeha/go3FU0e5WqDjXsCOdfiavCkfwfVHNDxRDGMA==} + + /tslib/2.4.0: + resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} + + /typescript/3.9.10: + resolution: {integrity: sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==} + engines: {node: '>=4.2.0'} + hasBin: true + dev: true + + /typescript/4.8.4: + resolution: {integrity: sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==} + engines: {node: '>=4.2.0'} + hasBin: true + + /update-browserslist-db/1.0.10_browserslist@4.21.4: + resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + dependencies: + browserslist: 4.21.4 + escalade: 3.1.1 + picocolors: 1.0.0 + dev: true + + /vite/3.1.8_less@4.1.3: + resolution: {integrity: sha512-m7jJe3nufUbuOfotkntGFupinL/fmuTNuQmiVE7cH2IZMuf4UbfbGYMUT3jVWgGYuRVLY9j8NnrRqgw5rr5QTg==} + engines: {node: ^14.18.0 || >=16.0.0} + hasBin: true + peerDependencies: + less: '*' + sass: '*' + stylus: '*' + terser: ^5.4.0 + peerDependenciesMeta: + less: + optional: true + sass: + optional: true + stylus: + optional: true + terser: + optional: true + dependencies: + esbuild: 0.15.11 + less: 4.1.3 + postcss: 8.4.18 + resolve: 1.22.1 + 
rollup: 2.78.1 + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /yaml/1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} + dev: false diff --git a/frontend/public/vite.svg b/frontend/public/vite.svg new file mode 100644 index 0000000..e7b8dfb --- /dev/null +++ b/frontend/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/actions.ts b/frontend/src/actions.ts new file mode 100644 index 0000000..6689bc7 --- /dev/null +++ b/frontend/src/actions.ts @@ -0,0 +1,26 @@ +import { FileData, FileArray, FileAction } from "chonky"; +import { defineFileAction } from "chonky"; + +type RenameFileState = { + contextMenuTriggerFile: FileData; + instanceId: string; + selectedFiles: FileArray; + selectedFilesForAction: FileArray; +}; + +export const RenameFileAction = defineFileAction({ + id: "rename_file", + requiresSelection: true, + button: { + name: "Rename File", + toolbar: true, + contextMenu: true, + group: "Actions", + icon: "edit", + }, + __extraStateType: {} as RenameFileState, +} as FileAction); + +export const RefreshListAction = defineFileAction({ + id: "refresh_list", +} as FileAction); diff --git a/frontend/src/api.ts b/frontend/src/api.ts new file mode 100644 index 0000000..c51b5fd --- /dev/null +++ b/frontend/src/api.ts @@ -0,0 +1,176 @@ +import { FileData } from "chonky"; +import { GrpcWebFetchTransport } from "@protobuf-ts/grpcweb-transport"; +import { ServiceClient, File, SourceFile } from "./entity"; + +import moment from "moment"; + +const apiBase: string = (() => { + const base = (window as any).apiBase as string; + if (!base || base === "%%API_BASE%%") { + return "http://127.0.0.1:8080/services"; + } + return base; +})(); + +export const ModeDir = 2147483648n; // d: is a directory + +export const Root: FileData = { + id: "0", + name: "Root", + isDir: true, + openable: true, + selectable: true, + draggable: true, + droppable: true, +}; + +export const sleep = (ms: number): Promise => + new Promise((resolve) => { + setTimeout(resolve, ms); + }); + +const transport = new GrpcWebFetchTransport({ + baseUrl: apiBase, + format: "binary", +}); + +export const cli = new ServiceClient(transport); +(window as any).cli = cli; + +export function convertFiles(files: Array): FileData[] { + return files.map((file) => { + const isDir = (file.mode & ModeDir) > 0; + + return { + id: getID(file), + name: file.name, + ext: extname(file.name), + isDir, + isHidden: file.name.startsWith("."), + openable: true, + selectable: true, + draggable: true, + droppable: isDir, + size: Number(file.size), + modDate: moment.unix(Number(file.modTime)).toDate(), + }; + }); +} + +export function convertSourceFiles(files: Array): FileData[] { + return files.map((file) => { + const isDir = (file.mode & ModeDir) > 0; + + return { + id: getID(file), + name: file.name, + ext: extname(file.name), + isDir, + isHidden: file.name.startsWith("."), + openable: isDir, + selectable: true, + draggable: true, + droppable: isDir, + size: Number(file.size), + modDate: moment.unix(Number(file.modTime)).toDate(), + }; + }); +} + +function extname(filename: string): string { + const idx = filename.lastIndexOf("."); + if (idx < 0) { + return ""; + } + return filename.slice(idx); +} + +function getID(file: File | SourceFile): string { + if ("id" in file) { + return `${file.id}`; + } + return file.path; +} + +// export interface GetFileResponse { +// file: File; +// positions: Position[]; +// 
children: FileArray; +// } +// export const getFile = async (id: string) => { +// const result = await fetch(`${Domain}/api/v1/file/${id}`); +// const body: GetFileResponse = await result.json(); +// return body; +// }; + +// export interface ListFileParentsResponse { +// parents: FileArray; +// } +// export const listFileParents = async (id: string) => { +// const result = await fetch(`${Domain}/api/v1/file/${id}/_parent`); +// const body: ListFileParentsResponse = await result.json(); +// return [Root, ...body.parents]; +// }; + +// export interface SetFileResponse { +// file?: File; +// result?: string; +// } +// export const editFile = async (id: string, payload: Partial) => { +// const result = await fetch(`${Domain}/api/v1/file/${id}`, { +// method: "POST", +// headers: { +// "Content-Type": "application/json", +// }, +// body: JSON.stringify(payload), +// }); +// const body: SetFileResponse = await result.json(); +// return body; +// }; + +// export const createFolder = async ( +// parentID: string, +// payload: Partial +// ) => { +// const result = await fetch(`${Domain}/api/v1/file/${parentID}/`, { +// method: "PUT", +// headers: { +// "Content-Type": "application/json", +// }, +// body: JSON.stringify(payload), +// }); +// const body: SetFileResponse = await result.json(); +// return body.file; +// }; + +// export const deleteFolder = async (ids: string[]) => { +// const result = await fetch(`${Domain}/api/v1/file/`, { +// method: "DELETE", +// headers: { +// "Content-Type": "application/json", +// }, +// body: JSON.stringify({ fileids: ids }), +// }); +// const body: SetFileResponse = await result.json(); +// return body; +// }; + +// interface GetTapeResponse { +// tape: Tape; +// } +// export const getTape = async (id: number) => { +// const result = await fetch(`${Domain}/api/v1/tape/${id}`); +// const body: GetTapeResponse = await result.json(); +// return body; +// }; + +// interface GetSourceResponse { +// file: File; +// chain: File[]; +// children: FileArray; +// } +// export const getSource = async (path: string) => { +// const result = await fetch(`${Domain}/api/v1/source/${path}`); +// const body: GetSourceResponse = await result.json(); +// return body; +// }; diff --git a/frontend/src/app.less b/frontend/src/app.less new file mode 100644 index 0000000..25b2a74 --- /dev/null +++ b/frontend/src/app.less @@ -0,0 +1,63 @@ +#app { + height: 100%; + width: 100%; + margin: 0; + text-align: center; + box-sizing: border-box; + + display: -webkit-flex; /* Safari */ + display: flex; + flex-direction: column; + + .tabs { + background-color: #ffffff; + } +} + +.browser-box { + background-color: #efefef; + padding: 0.5em; + height: 100%; + box-sizing: border-box; + + .browser-container { + margin: 0; + box-sizing: border-box; + height: 100%; + + .browser { + box-sizing: border-box; + padding-right: 0.5em; + + &:last-child { + padding-right: 0; + } + + .job-detail { + .app-MuiGrid-item { + padding-top: 1em; + padding-left: 1em; + } + + margin-bottom: 0.5em; + &:last-child { + margin-bottom: 0; + } + } + } + } +} + +.view-log-dialog { + .app-MuiDialog-paperScrollPaper { + height: 100%; + } + + pre { + white-space: pre-wrap; /* Since CSS 2.1 */ + white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + word-wrap: break-word; /* Internet Explorer 5.5+ */ + } +} diff --git a/frontend/src/app.tsx b/frontend/src/app.tsx new file mode 100644 index 0000000..345579e --- /dev/null +++ b/frontend/src/app.tsx 
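A hypothetical usage sketch (not part of the patch): how the gRPC-web client and conversion helpers defined in api.ts above are meant to be driven, using only the sourceList call that backup.tsx below actually makes; the function name here is illustrative.

// Sketch: load one directory of the backup source tree and adapt it for chonky.
// Assumes cli.sourceList as used in backup.tsx; loadSourceFolder is a made-up name.
import { FileData } from "chonky";
import { cli, convertSourceFiles } from "./api";

async function loadSourceFolder(path: string): Promise<{ files: FileData[]; folderChain: FileData[] }> {
  // protobuf-ts unary calls expose the reply promise on .response
  const reply = await cli.sourceList({ path }).response;
  return {
    files: convertSourceFiles(reply.children),    // directory entries for the file list
    folderChain: convertSourceFiles(reply.chain), // breadcrumb chain from root to path
  };
}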
@@ -0,0 +1,76 @@ +import { useEffect } from "react"; +import { useState, useCallback } from "react"; +import { ChangeEvent } from "react"; + +import Tabs from "@mui/material/Tabs"; +import Tab from "@mui/material/Tab"; +import { createTheme, ThemeProvider, styled } from "@mui/material/styles"; + +import { FileBrowser, FileBrowserType } from "./file"; +import { BackupBrowser, BackupType } from "./backup"; +import { JobsBrowser, JobsType } from "./jobs"; + +import "./app.less"; +import { sleep } from "./api"; +import { Nullable } from "tsdef"; + +// import reactLogo from './assets/react.svg' +// React logo + +const theme = createTheme({}); + +const typeToElement = (type: string) => { + switch (type) { + case FileBrowserType: + return ; + case BackupType: + return ; + case JobsType: + return ; + default: + return null; + } +}; + +const App = () => { + const [tabValue, setTabValue] = useState(FileBrowserType); + const [inner, setInner] = useState>(null); + + const setType = useCallback( + (newValue: string) => { + (async () => { + setTabValue(newValue); + setInner(null); + await sleep(0); + setInner(typeToElement(newValue)); + })(); + }, + [setTabValue, setInner] + ); + + const handleTabChange = useCallback( + (_: ChangeEvent<{}>, newValue: string) => { + setType(newValue); + }, + [setTabValue] + ); + + useEffect(() => { + setType(FileBrowserType); + }, []); + + return ( +
+ [theme and tab-bar JSX lost in extraction] + {inner} +
+ ); +}; + +export default App; diff --git a/frontend/src/assets/react.svg b/frontend/src/assets/react.svg new file mode 100644 index 0000000..6c87de9 --- /dev/null +++ b/frontend/src/assets/react.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/backup.tsx b/frontend/src/backup.tsx new file mode 100644 index 0000000..b91dfa3 --- /dev/null +++ b/frontend/src/backup.tsx @@ -0,0 +1,327 @@ +import { useState, useEffect, useMemo, useCallback, FC } from "react"; + +import Grid from "@mui/material/Grid"; +import Box from "@mui/material/Box"; +import { FullFileBrowser, FileBrowser, FileNavbar, FileToolbar, FileList, FileContextMenu, FileArray } from "chonky"; +import { ChonkyActions, ChonkyFileActionData } from "chonky"; + +import { DndProvider as UntypedDndProvider, useDrop, DndProviderProps } from "react-dnd"; +import { HTML5Backend } from "react-dnd-html5-backend"; + +import "./app.less"; +import { cli, convertSourceFiles } from "./api"; +import { Root } from "./api"; +import { RenameFileAction, RefreshListAction } from "./actions"; + +import { useDetailModal, DetailModal, Detail } from "./detail"; + +const DndProvider = UntypedDndProvider as FC & { children: JSX.Element[] }>; + +const useBackupSourceBrowser = () => + // openDetailModel: (detail: Detail) => void + { + const [files, setFiles] = useState(Array(1).fill(null)); + const [folderChain, setFolderChan] = useState([Root]); + // const currentID = useMemo(() => { + // if (folderChain.length === 0) { + // return "0"; + // } + + // const last = folderChain.slice(-1)[0]; + // if (!last) { + // return "0"; + // } + + // return last.id; + // }, [folderChain]); + + const openFolder = useCallback((path: string) => { + (async () => { + const result = await cli.sourceList({ path }).response; + console.log("source list", { + path, + result, + converted: convertSourceFiles(result.children), + }); + + setFiles(convertSourceFiles(result.children)); + setFolderChan(convertSourceFiles(result.chain)); + })(); + }, []); + useEffect(() => openFolder(""), []); + + const onFileAction = useCallback( + (data: ChonkyFileActionData) => { + // console.log(data); + switch (data.id) { + case ChonkyActions.OpenFiles.id: + (async () => { + const { targetFile, files } = data.payload; + + const fileToOpen = targetFile ?? 
files[0]; + if (!fileToOpen) { + return; + } + + if (fileToOpen.isDir) { + await openFolder(fileToOpen.id); + return; + } + + // const file = await getFile(fileToOpen.id); + // await openDetailModel(file); + })(); + + return; + // case ChonkyActions.MoveFiles.id: + // (async () => { + // const { destination, files } = data.payload; + // for (const file of files) { + // await editFile(file.id, { parentid: destination.id }); + // } + // await refreshAll(); + // })(); + + // return; + // case RenameFileAction.id: + // (async () => { + // const files = data.state.selectedFilesForAction; + // if (files.length === 0) { + // return; + // } + // const file = files[0]; + + // const name = prompt("Provide new name for this file:", file.name); + // if (!name) { + // return; + // } + + // await editFile(file.id, { name }); + // await refreshAll(); + // })(); + // return; + // case ChonkyActions.CreateFolder.id: + // (async () => { + // const name = prompt("Provide the name for your new folder:"); + // if (!name) { + // return; + // } + + // await createFolder(currentID, { name }); + // await refreshAll(); + // })(); + // return; + // case ChonkyActions.DeleteFiles.id: + // (async () => { + // const files = data.state.selectedFilesForAction; + // const fileids = files.map((file) => file.id); + // await deleteFolder(fileids); + // await refreshAll(); + // })(); + + // return; + // case RefreshListAction.id: + // openFolder(currentID); + // return; + } + }, + [openFolder] + ); + + const fileActions = useMemo(() => [ChonkyActions.StartDragNDrop, RefreshListAction], []); + + return { + files, + folderChain, + onFileAction, + fileActions, + defaultFileViewActionId: ChonkyActions.EnableListView.id, + doubleClickDelay: 300, + }; + }; + +const useBackupTargetBrowser = () => + // openDetailModel: (detail: Detail) => void + { + const [files, setFiles] = useState(Array(1).fill(null)); + const [folderChain, setFolderChan] = useState([Root]); + // const currentID = useMemo(() => { + // if (folderChain.length === 0) { + // return "0"; + // } + + // const last = folderChain.slice(-1)[0]; + // if (!last) { + // return "0"; + // } + + // return last.id; + // }, [folderChain]); + + const openFolder = useCallback((path: string) => { + (async () => { + const result = await cli.sourceList({ path }).response; + result.chain[0].name = "BackupSource"; + + setFiles(convertSourceFiles(result.children)); + setFolderChan(convertSourceFiles(result.chain)); + })(); + }, []); + useEffect(() => openFolder(""), []); + + const onFileAction = useCallback( + (data: ChonkyFileActionData) => { + // console.log(data); + switch (data.id) { + case ChonkyActions.OpenFiles.id: + (async () => { + const { targetFile, files } = data.payload; + + const fileToOpen = targetFile ?? 
files[0]; + if (!fileToOpen) { + return; + } + + if (fileToOpen.isDir) { + await openFolder(fileToOpen.id); + return; + } + + // const file = await getFile(fileToOpen.id); + // await openDetailModel(file); + })(); + + return; + // case ChonkyActions.MoveFiles.id: + // (async () => { + // const { destination, files } = data.payload; + // for (const file of files) { + // await editFile(file.id, { parentid: destination.id }); + // } + // await refreshAll(); + // })(); + + // return; + // case RenameFileAction.id: + // (async () => { + // const files = data.state.selectedFilesForAction; + // if (files.length === 0) { + // return; + // } + // const file = files[0]; + + // const name = prompt("Provide new name for this file:", file.name); + // if (!name) { + // return; + // } + + // await editFile(file.id, { name }); + // await refreshAll(); + // })(); + // return; + // case ChonkyActions.CreateFolder.id: + // (async () => { + // const name = prompt("Provide the name for your new folder:"); + // if (!name) { + // return; + // } + + // await createFolder(currentID, { name }); + // await refreshAll(); + // })(); + // return; + // case ChonkyActions.DeleteFiles.id: + // (async () => { + // const files = data.state.selectedFilesForAction; + // const fileids = files.map((file) => file.id); + // await deleteFolder(fileids); + // await refreshAll(); + // })(); + + // return; + // case RefreshListAction.id: + // openFolder(currentID); + // return; + } + }, + [openFolder] + ); + + const fileActions = useMemo(() => [ChonkyActions.StartDragNDrop, RefreshListAction], []); + + return { + files, + folderChain, + onFileAction, + fileActions, + defaultFileViewActionId: ChonkyActions.EnableListView.id, + doubleClickDelay: 300, + }; + }; + +// const CustomDropZone = () => { +// const [maybeImpostor, setMaybeImpostor] = useState(null); +// const [{ isOver, canDrop }, drop] = useDrop({ +// accept: ChonkyDndFileEntryType, +// drop: (item: ChonkyDndFileEntryItem) => { +// setMaybeImpostor(item.payload.draggedFile.name); +// console.log("DnD payload:", item.payload); +// }, +// // canDrop: (item: ChonkyDndFileEntryItem) => !item.payload.draggedFile.isDir, +// canDrop: (item: ChonkyDndFileEntryItem) => true, +// collect: (monitor) => ({ +// isOver: monitor.isOver(), +// canDrop: monitor.canDrop(), +// }), +// }); +// return ( +//
+// {isOver +// ? canDrop +// ? "C'mon, drop 'em!" +// : "Folders are not allowed!" +// : maybeImpostor +// ? `${maybeImpostor} was not the impostor.` +// : "Drag & drop a (Chonky) file here"} +//
+// ); +// }; + +export const BackupType = "backup"; + +export const BackupBrowser = () => { + const sourceProps = useBackupSourceBrowser(); + const targetProps = useBackupTargetBrowser(); + + return ( + + + + {/* */} + + + + + + + + + + + + + ); +}; diff --git a/frontend/src/detail.less b/frontend/src/detail.less new file mode 100644 index 0000000..25336e2 --- /dev/null +++ b/frontend/src/detail.less @@ -0,0 +1,23 @@ +.detail-content { + .position { + .app-MuiDialogContent-dividers { + border-bottom: none; + } + + .app-MuiGrid-item { + width: 100%; + padding: 0.5em; + + p, pre { + margin: 0.2em 0; + + white-space: pre-wrap; /* Since CSS 2.1 */ + white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + word-wrap: break-word; /* Internet Explorer 5.5+ */ + } + } + } +} + diff --git a/frontend/src/detail.tsx b/frontend/src/detail.tsx new file mode 100644 index 0000000..54411ac --- /dev/null +++ b/frontend/src/detail.tsx @@ -0,0 +1,186 @@ +import { Nullable } from "tsdef"; + +import Dialog, { DialogProps } from "@mui/material/Dialog"; +import DialogContent from "@mui/material/DialogContent"; +import DialogTitle from "@mui/material/DialogTitle"; + +import { Grid } from "@mui/material"; +import moment from "moment"; + +import { useState, useCallback } from "react"; + +import "./app.less"; +import { cli } from "./api"; +import { formatFilesize } from "./tools"; + +import "./detail.less"; +import { FileGetReply, Tape } from "./entity"; + +export type Detail = FileGetReply & { + tapes: Map; +}; + +export const useDetailModal = () => { + const [detail, setDetail] = useState>(null); + const openDetailModel = useCallback( + (detail: FileGetReply) => { + (async () => { + const tapeList = await cli.tapeMGet({ + ids: detail.positions.map((posi) => posi.tapeId), + }).response; + + const tapes = new Map(); + for (const tape of tapeList.tapes) { + tapes.set(tape.id, tape); + } + + setDetail({ ...detail, tapes }); + })(); + }, + [setDetail] + ); + const closeDetailModel = useCallback(() => { + setDetail(null); + }, [setDetail]); + + return { detail, closeDetailModel, openDetailModel }; +}; + +export const DetailModal = (props: Omit & { detail: Nullable }) => { + const { detail, ...otherProps } = props; + if (!detail) { + return null; + } + + return ( + + {detail.file?.name} + +
+ {detail.positions.map((posi) => {
+   const tape = detail.tapes?.get(posi.tapeId);
+   if (!tape) {
+     return null;
+   }
+
+   return (
+     [grid markup lost in extraction; each position renders these label/value pairs]
+     Tape ID: {tape?.barcode}
+     Tape Name: {tape?.name}
+     Tape Create Time: {tape?.createTime ? moment.unix(Number(tape.createTime)).format() : "--"}
+     Tape Destroy Time: {tape?.destroyTime ? (moment(Number(tape.destroyTime)).format() as string) : "--"}
+     Tape Capacity: {formatFilesize(tape?.capacityBytes)}
+     Tape Written: {formatFilesize(tape?.writenBytes)}
+     Path: {posi.path}
+     Permission: {(Number(posi.mode) & 0o777).toString(8)}
+     Modify Time: {moment.unix(Number(posi.modTime)).format()}
+     Write Time: {moment.unix(Number(posi.writeTime)).format()}
+     Size: {formatFilesize(posi.size)}
+
+
+
+ ); + })} +
+ {/* + */} + {/* + + + */} +
+ ); + + // return ; +}; diff --git a/frontend/src/entity/copy_status.ts b/frontend/src/entity/copy_status.ts new file mode 100644 index 0000000..c907a23 --- /dev/null +++ b/frontend/src/entity/copy_status.ts @@ -0,0 +1,34 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "copy_status.proto" (package "copy_status", syntax proto3) +// tslint:disable +/** + * @generated from protobuf enum copy_status.CopyStatus + */ +export enum CopyStatus { + /** + * @generated from protobuf enum value: Draft = 0; + */ + Draft = 0, + /** + * waiting in queue + * + * @generated from protobuf enum value: Pending = 1; + */ + Pending = 1, + /** + * @generated from protobuf enum value: Running = 2; + */ + Running = 2, + /** + * @generated from protobuf enum value: Staged = 3; + */ + Staged = 3, + /** + * @generated from protobuf enum value: Submited = 4; + */ + Submited = 4, + /** + * @generated from protobuf enum value: Failed = 255; + */ + Failed = 255 +} diff --git a/frontend/src/entity/file.ts b/frontend/src/entity/file.ts new file mode 100644 index 0000000..42077a4 --- /dev/null +++ b/frontend/src/entity/file.ts @@ -0,0 +1,202 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "file.proto" (package "file", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * @generated from protobuf message file.File + */ +export interface File { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: int64 parent_id = 2; + */ + parentId: bigint; + /** + * @generated from protobuf field: string name = 3; + */ + name: string; + /** + * @generated from protobuf field: int64 mode = 17; + */ + mode: bigint; + /** + * @generated from protobuf field: int64 mod_time = 18; + */ + modTime: bigint; + /** + * @generated from protobuf field: int64 size = 19; + */ + size: bigint; + /** + * @generated from protobuf field: bytes hash = 20; + */ + hash: Uint8Array; +} +/** + * @generated from protobuf message file.EditedFile + */ +export interface EditedFile { + /** + * @generated from protobuf field: optional int64 parent_id = 2; + */ + parentId?: bigint; + /** + * @generated from protobuf field: optional string name = 3; + */ + name?: string; +} +// @generated message type with reflection information, may provide speed optimized methods +class File$Type extends MessageType { + constructor() { + super("file.File", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "parent_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 19, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ 
}, + { no: 20, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): File { + const message = { id: 0n, parentId: 0n, name: "", mode: 0n, modTime: 0n, size: 0n, hash: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: File): File { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* int64 parent_id */ 2: + message.parentId = reader.int64().toBigInt(); + break; + case /* string name */ 3: + message.name = reader.string(); + break; + case /* int64 mode */ 17: + message.mode = reader.int64().toBigInt(); + break; + case /* int64 mod_time */ 18: + message.modTime = reader.int64().toBigInt(); + break; + case /* int64 size */ 19: + message.size = reader.int64().toBigInt(); + break; + case /* bytes hash */ 20: + message.hash = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: File, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* int64 parent_id = 2; */ + if (message.parentId !== 0n) + writer.tag(2, WireType.Varint).int64(message.parentId); + /* string name = 3; */ + if (message.name !== "") + writer.tag(3, WireType.LengthDelimited).string(message.name); + /* int64 mode = 17; */ + if (message.mode !== 0n) + writer.tag(17, WireType.Varint).int64(message.mode); + /* int64 mod_time = 18; */ + if (message.modTime !== 0n) + writer.tag(18, WireType.Varint).int64(message.modTime); + /* int64 size = 19; */ + if (message.size !== 0n) + writer.tag(19, WireType.Varint).int64(message.size); + /* bytes hash = 20; */ + if (message.hash.length) + writer.tag(20, WireType.LengthDelimited).bytes(message.hash); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message file.File + */ +export const File = new File$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class EditedFile$Type extends MessageType { + constructor() { + super("file.EditedFile", [ + { no: 2, name: "parent_id", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "name", kind: "scalar", opt: true, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): EditedFile { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: EditedFile): EditedFile { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* optional int64 parent_id */ 2: + message.parentId = reader.int64().toBigInt(); + break; + case /* optional string name */ 3: + message.name = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: EditedFile, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* optional int64 parent_id = 2; */ + if (message.parentId !== undefined) + writer.tag(2, WireType.Varint).int64(message.parentId); + /* optional string name = 3; */ + if (message.name !== undefined) + writer.tag(3, WireType.LengthDelimited).string(message.name); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message file.EditedFile + */ +export const EditedFile = new EditedFile$Type(); diff --git a/frontend/src/entity/gen_index.sh b/frontend/src/entity/gen_index.sh new file mode 100755 index 0000000..4511ab8 --- /dev/null +++ b/frontend/src/entity/gen_index.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -e + +CURDIR=$(cd $(dirname $0); pwd); +cd ${CURDIR}; + +echo '' > index.ts; + +FILES=`ls *.ts | grep -v index.ts | sed -e 's/\.ts$//'`; +for file in ${FILES}; do + echo "export * from \"./${file}\";" >> index.ts; +done diff --git a/frontend/src/entity/index.ts b/frontend/src/entity/index.ts new file mode 100644 index 0000000..43ee895 --- /dev/null +++ b/frontend/src/entity/index.ts @@ -0,0 +1,11 @@ + +export * from "./copy_status"; +export * from "./file"; +export * from "./job"; +export * from "./job_archive"; +export * from "./job_restore"; +export * from "./position"; +export * from "./service.client"; +export * from "./service"; +export * from "./source"; +export * from "./tape"; diff --git a/frontend/src/entity/job.ts b/frontend/src/entity/job.ts new file mode 100644 index 0000000..3b37030 --- /dev/null +++ b/frontend/src/entity/job.ts @@ -0,0 +1,574 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "job.proto" (package "job", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { JobDisplayArchive } from "./job_archive"; +import { JobArchiveNextParam } from "./job_archive"; +import { JobStateArchive } from "./job_archive"; +import { JobParamArchive } from "./job_archive"; +/** + * @generated from protobuf message job.Job + */ +export interface Job { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: job.JobStatus status = 2; + */ + status: JobStatus; + /** + * @generated from protobuf field: int64 priority = 3; + */ + priority: bigint; + /** + * @generated from protobuf field: int64 create_time = 4; + */ + createTime: bigint; + /** + * @generated from protobuf field: int64 update_time = 5; + */ + updateTime: bigint; + /** + * @generated from protobuf field: job.JobState state = 17; + */ + state?: JobState; +} +/** + * @generated from protobuf message job.JobParam + */ +export interface JobParam { + /** + * @generated from protobuf oneof: param + */ + param: { + oneofKind: "archive"; + /** + * @generated from protobuf field: job_archive.JobParamArchive Archive = 1 [json_name = "Archive"]; + */ + archive: JobParamArchive; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message job.JobState + */ +export interface JobState { + /** + * @generated from protobuf oneof: state + */ + state: { + oneofKind: "archive"; + /** + * @generated from protobuf field: job_archive.JobStateArchive Archive = 1 [json_name = "Archive"]; + */ + archive: JobStateArchive; + } | { + oneofKind: 
undefined; + }; +} +/** + * @generated from protobuf message job.JobNextParam + */ +export interface JobNextParam { + /** + * @generated from protobuf oneof: param + */ + param: { + oneofKind: "archive"; + /** + * @generated from protobuf field: job_archive.JobArchiveNextParam archive = 1; + */ + archive: JobArchiveNextParam; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message job.CreatableJob + */ +export interface CreatableJob { + /** + * @generated from protobuf field: int64 priority = 3; + */ + priority: bigint; + /** + * @generated from protobuf field: job.JobParam param = 17; + */ + param?: JobParam; +} +/** + * @generated from protobuf message job.JobFilter + */ +export interface JobFilter { + /** + * @generated from protobuf field: optional job.JobStatus status = 1; + */ + status?: JobStatus; + /** + * @generated from protobuf field: optional int64 limit = 33; + */ + limit?: bigint; + /** + * @generated from protobuf field: optional int64 offset = 34; + */ + offset?: bigint; +} +/** + * @generated from protobuf message job.JobDisplay + */ +export interface JobDisplay { + /** + * @generated from protobuf oneof: display + */ + display: { + oneofKind: "archive"; + /** + * @generated from protobuf field: job_archive.JobDisplayArchive archive = 1; + */ + archive: JobDisplayArchive; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf enum job.JobStatus + */ +export enum JobStatus { + /** + * @generated from protobuf enum value: Draft = 0; + */ + Draft = 0, + /** + * dependencies not satisfied + * + * @generated from protobuf enum value: NotReady = 1; + */ + NotReady = 1, + /** + * waiting in queue + * + * @generated from protobuf enum value: Pending = 2; + */ + Pending = 2, + /** + * @generated from protobuf enum value: Processing = 3; + */ + Processing = 3, + /** + * @generated from protobuf enum value: Completed = 4; + */ + Completed = 4, + /** + * @generated from protobuf enum value: Failed = 255; + */ + Failed = 255 +} +// @generated message type with reflection information, may provide speed optimized methods +class Job$Type extends MessageType { + constructor() { + super("job.Job", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "status", kind: "enum", T: () => ["job.JobStatus", JobStatus] }, + { no: 3, name: "priority", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 4, name: "create_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 5, name: "update_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 17, name: "state", kind: "message", T: () => JobState } + ]); + } + create(value?: PartialMessage): Job { + const message = { id: 0n, status: 0, priority: 0n, createTime: 0n, updateTime: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Job): Job { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* job.JobStatus status */ 2: + message.status = reader.int32(); + break; + case /* int64 priority */ 3: + message.priority = reader.int64().toBigInt(); + break; + case /* int64 create_time */ 4: + message.createTime = reader.int64().toBigInt(); + break; + case /* int64 update_time */ 5: + message.updateTime = reader.int64().toBigInt(); + break; + case /* job.JobState state */ 17: + message.state = JobState.internalBinaryRead(reader, reader.uint32(), options, message.state); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Job, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* job.JobStatus status = 2; */ + if (message.status !== 0) + writer.tag(2, WireType.Varint).int32(message.status); + /* int64 priority = 3; */ + if (message.priority !== 0n) + writer.tag(3, WireType.Varint).int64(message.priority); + /* int64 create_time = 4; */ + if (message.createTime !== 0n) + writer.tag(4, WireType.Varint).int64(message.createTime); + /* int64 update_time = 5; */ + if (message.updateTime !== 0n) + writer.tag(5, WireType.Varint).int64(message.updateTime); + /* job.JobState state = 17; */ + if (message.state) + JobState.internalBinaryWrite(message.state, writer.tag(17, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.Job + */ +export const Job = new Job$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobParam$Type extends MessageType { + constructor() { + super("job.JobParam", [ + { no: 1, name: "Archive", kind: "message", jsonName: "Archive", oneof: "param", T: () => JobParamArchive } + ]); + } + create(value?: PartialMessage): JobParam { + const message = { param: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobParam): JobParam { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobParamArchive Archive = 1 [json_name = "Archive"];*/ 1: + message.param = { + oneofKind: "archive", + archive: JobParamArchive.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).archive) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobParamArchive Archive = 1 [json_name = "Archive"]; */ + if (message.param.oneofKind === "archive") + JobParamArchive.internalBinaryWrite(message.param.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.JobParam + */ +export const JobParam = new JobParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobState$Type extends MessageType { + constructor() { + super("job.JobState", [ + { no: 1, name: "Archive", kind: "message", jsonName: "Archive", oneof: "state", T: () => JobStateArchive } + ]); + } + create(value?: PartialMessage): JobState { + const message = { state: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobState): JobState { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobStateArchive Archive = 1 [json_name = "Archive"];*/ 1: + message.state = { + oneofKind: "archive", + archive: JobStateArchive.internalBinaryRead(reader, reader.uint32(), options, (message.state as any).archive) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobStateArchive Archive = 1 [json_name = "Archive"]; */ + if (message.state.oneofKind === "archive") + JobStateArchive.internalBinaryWrite(message.state.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.JobState + */ +export const JobState = new JobState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobNextParam$Type extends MessageType { + constructor() { + super("job.JobNextParam", [ + { no: 1, name: "archive", kind: "message", oneof: "param", T: () => JobArchiveNextParam } + ]); + } + create(value?: PartialMessage): JobNextParam { + const message = { param: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobNextParam): JobNextParam { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobArchiveNextParam archive */ 1: + message.param = { + oneofKind: "archive", + archive: JobArchiveNextParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).archive) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobArchiveNextParam archive = 1; */ + if (message.param.oneofKind === "archive") + JobArchiveNextParam.internalBinaryWrite(message.param.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.JobNextParam + */ +export const JobNextParam = new JobNextParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CreatableJob$Type extends MessageType { + constructor() { + super("job.CreatableJob", [ + { no: 3, name: "priority", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 17, name: "param", kind: "message", T: () => JobParam } + ]); + } + create(value?: PartialMessage): CreatableJob { + const message = { priority: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreatableJob): CreatableJob { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 priority */ 3: + message.priority = reader.int64().toBigInt(); + break; + case /* job.JobParam param */ 17: + message.param = JobParam.internalBinaryRead(reader, reader.uint32(), options, message.param); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CreatableJob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 priority = 3; */ + if (message.priority !== 0n) + writer.tag(3, WireType.Varint).int64(message.priority); + /* job.JobParam param = 17; */ + if (message.param) + JobParam.internalBinaryWrite(message.param, writer.tag(17, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.CreatableJob + */ +export const CreatableJob = new CreatableJob$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobFilter$Type extends MessageType { + constructor() { + super("job.JobFilter", [ + { no: 1, name: "status", kind: "enum", opt: true, T: () => ["job.JobStatus", JobStatus] }, + { no: 33, name: "limit", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 34, name: "offset", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobFilter { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobFilter): JobFilter { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* optional job.JobStatus status */ 1: + message.status = reader.int32(); + break; + case /* optional int64 limit */ 33: + message.limit = reader.int64().toBigInt(); + break; + case /* optional int64 offset */ 34: + message.offset = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobFilter, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* optional job.JobStatus status = 1; */ + if (message.status !== undefined) + writer.tag(1, WireType.Varint).int32(message.status); + /* optional int64 limit = 33; */ + if (message.limit !== undefined) + writer.tag(33, WireType.Varint).int64(message.limit); + /* optional int64 offset = 34; */ + if (message.offset !== undefined) + writer.tag(34, WireType.Varint).int64(message.offset); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.JobFilter + */ +export const JobFilter = new JobFilter$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobDisplay$Type extends MessageType { + constructor() { + super("job.JobDisplay", [ + { no: 1, name: "archive", kind: "message", oneof: "display", T: () => JobDisplayArchive } + ]); + } + create(value?: PartialMessage): JobDisplay { + const message = { display: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplay): JobDisplay { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobDisplayArchive archive */ 1: + message.display = { + oneofKind: "archive", + archive: JobDisplayArchive.internalBinaryRead(reader, reader.uint32(), options, (message.display as any).archive) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobDisplay, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobDisplayArchive archive = 1; */ + if (message.display.oneofKind === "archive") + JobDisplayArchive.internalBinaryWrite(message.display.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job.JobDisplay + */ +export const JobDisplay = new JobDisplay$Type(); diff --git a/frontend/src/entity/job_archive.ts b/frontend/src/entity/job_archive.ts new file mode 100644 index 0000000..9c0f9d7 --- /dev/null +++ b/frontend/src/entity/job_archive.ts @@ -0,0 +1,498 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "job_archive.proto" (package "job_archive", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { SourceState } from "./source"; +import { Source } from "./source"; +/** + * @generated from protobuf message job_archive.JobParamArchive + */ +export interface JobParamArchive { + /** + * @generated from protobuf field: repeated source.Source sources = 1; + */ + sources: Source[]; +} +/** + * @generated from protobuf message job_archive.JobArchiveNextParam + */ +export interface JobArchiveNextParam { + /** + * @generated from protobuf oneof: param + */ + param: { + oneofKind: "waitForTape"; + /** + * @generated from protobuf field: job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; + */ + waitForTape: JobArchiveWaitForTapeParam; + } | { + oneofKind: "copying"; + /** + * @generated from protobuf field: job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"]; + */ + copying: JobArchiveCopyingParam; + } | { + oneofKind: "finished"; + /** + * @generated from protobuf field: job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"]; + */ + finished: JobArchiveFinishedParam; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message job_archive.JobArchiveWaitForTapeParam + */ +export interface JobArchiveWaitForTapeParam { +} +/** + * @generated from protobuf message job_archive.JobArchiveCopyingParam + */ +export interface JobArchiveCopyingParam { + /** + * @generated from protobuf field: string device = 1; + */ + device: string; + /** + * @generated from protobuf field: string barcode = 2; + */ + barcode: string; + /** + * @generated from protobuf field: string name = 3; + */ + name: string; +} +/** + * @generated from protobuf message job_archive.JobArchiveFinishedParam + */ +export interface JobArchiveFinishedParam { +} +/** + * @generated from protobuf message job_archive.JobStateArchive + */ +export interface JobStateArchive { + /** + * @generated from protobuf field: job_archive.JobArchiveStep step = 1; + */ + step: JobArchiveStep; + /** + * @generated from protobuf field: repeated source.SourceState sources = 2; + */ + sources: SourceState[]; +} +/** + * @generated from protobuf message job_archive.JobDisplayArchive + */ +export interface JobDisplayArchive { + /** + * @generated from protobuf field: int64 copyedBytes = 1; + */ + copyedBytes: bigint; + /** + * @generated from protobuf field: int64 copyedFiles = 2; + */ + copyedFiles: bigint; + /** + * 
@generated from protobuf field: int64 totalBytes = 3; + */ + totalBytes: bigint; + /** + * @generated from protobuf field: int64 totalFiles = 4; + */ + totalFiles: bigint; + /** + * @generated from protobuf field: optional int64 speed = 5; + */ + speed?: bigint; +} +/** + * @generated from protobuf enum job_archive.JobArchiveStep + */ +export enum JobArchiveStep { + /** + * @generated from protobuf enum value: Pending = 0; + */ + Pending = 0, + /** + * @generated from protobuf enum value: WaitForTape = 1; + */ + WaitForTape = 1, + /** + * @generated from protobuf enum value: Copying = 2; + */ + Copying = 2, + /** + * @generated from protobuf enum value: Finished = 255; + */ + Finished = 255 +} +// @generated message type with reflection information, may provide speed optimized methods +class JobParamArchive$Type extends MessageType { + constructor() { + super("job_archive.JobParamArchive", [ + { no: 1, name: "sources", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Source } + ]); + } + create(value?: PartialMessage): JobParamArchive { + const message = { sources: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobParamArchive): JobParamArchive { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated source.Source sources */ 1: + message.sources.push(Source.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobParamArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated source.Source sources = 1; */ + for (let i = 0; i < message.sources.length; i++) + Source.internalBinaryWrite(message.sources[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobParamArchive + */ +export const JobParamArchive = new JobParamArchive$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobArchiveNextParam$Type extends MessageType { + constructor() { + super("job_archive.JobArchiveNextParam", [ + { no: 1, name: "WaitForTape", kind: "message", jsonName: "WaitForTape", oneof: "param", T: () => JobArchiveWaitForTapeParam }, + { no: 2, name: "Copying", kind: "message", jsonName: "Copying", oneof: "param", T: () => JobArchiveCopyingParam }, + { no: 255, name: "Finished", kind: "message", jsonName: "Finished", oneof: "param", T: () => JobArchiveFinishedParam } + ]); + } + create(value?: PartialMessage): JobArchiveNextParam { + const message = { param: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveNextParam): JobArchiveNextParam { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];*/ 1: + message.param = { + oneofKind: "waitForTape", + waitForTape: JobArchiveWaitForTapeParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).waitForTape) + }; + break; + case /* job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"];*/ 2: + message.param = { + oneofKind: "copying", + copying: JobArchiveCopyingParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).copying) + }; + break; + case /* job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"];*/ 255: + message.param = { + oneofKind: "finished", + finished: JobArchiveFinishedParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).finished) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobArchiveNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; */ + if (message.param.oneofKind === "waitForTape") + JobArchiveWaitForTapeParam.internalBinaryWrite(message.param.waitForTape, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"]; */ + if (message.param.oneofKind === "copying") + JobArchiveCopyingParam.internalBinaryWrite(message.param.copying, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"]; */ + if (message.param.oneofKind === "finished") + JobArchiveFinishedParam.internalBinaryWrite(message.param.finished, writer.tag(255, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobArchiveNextParam + */ +export const JobArchiveNextParam = new JobArchiveNextParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobArchiveWaitForTapeParam$Type extends MessageType { + constructor() { + super("job_archive.JobArchiveWaitForTapeParam", []); + } + create(value?: PartialMessage): JobArchiveWaitForTapeParam { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveWaitForTapeParam): JobArchiveWaitForTapeParam { + return target ?? this.create(); + } + internalBinaryWrite(message: JobArchiveWaitForTapeParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobArchiveWaitForTapeParam + */ +export const JobArchiveWaitForTapeParam = new JobArchiveWaitForTapeParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobArchiveCopyingParam$Type extends MessageType { + constructor() { + super("job_archive.JobArchiveCopyingParam", [ + { no: 1, name: "device", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "barcode", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): JobArchiveCopyingParam { + const message = { device: "", barcode: "", name: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveCopyingParam): JobArchiveCopyingParam { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string device */ 1: + message.device = reader.string(); + break; + case /* string barcode */ 2: + message.barcode = reader.string(); + break; + case /* string name */ 3: + message.name = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobArchiveCopyingParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string device = 1; */ + if (message.device !== "") + writer.tag(1, WireType.LengthDelimited).string(message.device); + /* string barcode = 2; */ + if (message.barcode !== "") + writer.tag(2, WireType.LengthDelimited).string(message.barcode); + /* string name = 3; */ + if (message.name !== "") + writer.tag(3, WireType.LengthDelimited).string(message.name); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobArchiveCopyingParam + */ +export const JobArchiveCopyingParam = new JobArchiveCopyingParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobArchiveFinishedParam$Type extends MessageType { + constructor() { + super("job_archive.JobArchiveFinishedParam", []); + } + create(value?: PartialMessage): JobArchiveFinishedParam { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveFinishedParam): JobArchiveFinishedParam { + return target ?? this.create(); + } + internalBinaryWrite(message: JobArchiveFinishedParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobArchiveFinishedParam + */ +export const JobArchiveFinishedParam = new JobArchiveFinishedParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobStateArchive$Type extends MessageType { + constructor() { + super("job_archive.JobStateArchive", [ + { no: 1, name: "step", kind: "enum", T: () => ["job_archive.JobArchiveStep", JobArchiveStep] }, + { no: 2, name: "sources", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => SourceState } + ]); + } + create(value?: PartialMessage): JobStateArchive { + const message = { step: 0, sources: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobStateArchive): JobStateArchive { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_archive.JobArchiveStep step */ 1: + message.step = reader.int32(); + break; + case /* repeated source.SourceState sources */ 2: + message.sources.push(SourceState.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobStateArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_archive.JobArchiveStep step = 1; */ + if (message.step !== 0) + writer.tag(1, WireType.Varint).int32(message.step); + /* repeated source.SourceState sources = 2; */ + for (let i = 0; i < message.sources.length; i++) + SourceState.internalBinaryWrite(message.sources[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobStateArchive + */ +export const JobStateArchive = new JobStateArchive$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobDisplayArchive$Type extends MessageType { + constructor() { + super("job_archive.JobDisplayArchive", [ + { no: 1, name: "copyedBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "copyedFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "totalBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 4, name: "totalFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 5, name: "speed", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobDisplayArchive { + const message = { copyedBytes: 0n, copyedFiles: 0n, totalBytes: 0n, totalFiles: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayArchive): JobDisplayArchive { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 copyedBytes */ 1: + message.copyedBytes = reader.int64().toBigInt(); + break; + case /* int64 copyedFiles */ 2: + message.copyedFiles = reader.int64().toBigInt(); + break; + case /* int64 totalBytes */ 3: + message.totalBytes = reader.int64().toBigInt(); + break; + case /* int64 totalFiles */ 4: + message.totalFiles = reader.int64().toBigInt(); + break; + case /* optional int64 speed */ 5: + message.speed = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobDisplayArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 copyedBytes = 1; */ + if (message.copyedBytes !== 0n) + writer.tag(1, WireType.Varint).int64(message.copyedBytes); + /* int64 copyedFiles = 2; */ + if (message.copyedFiles !== 0n) + writer.tag(2, WireType.Varint).int64(message.copyedFiles); + /* int64 totalBytes = 3; */ + if (message.totalBytes !== 0n) + writer.tag(3, WireType.Varint).int64(message.totalBytes); + /* int64 totalFiles = 4; */ + if (message.totalFiles !== 0n) + writer.tag(4, WireType.Varint).int64(message.totalFiles); + /* optional int64 speed = 5; */ + if (message.speed !== undefined) + writer.tag(5, WireType.Varint).int64(message.speed); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_archive.JobDisplayArchive + */ +export const JobDisplayArchive = new JobDisplayArchive$Type(); diff --git a/frontend/src/entity/job_restore.ts b/frontend/src/entity/job_restore.ts new file mode 100644 index 0000000..162dac8 --- /dev/null +++ b/frontend/src/entity/job_restore.ts @@ -0,0 +1,583 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "job_restore.proto" (package "job_restore", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { CopyStatus } from "./copy_status"; +/** + * @generated from protobuf message job_restore.JobParamRestore + */ +export interface JobParamRestore { + /** + * @generated from protobuf field: repeated int64 file_ids = 1; + */ + fileIds: bigint[]; +} +/** + * @generated from protobuf message job_restore.JobRestoreNextParam + */ +export interface JobRestoreNextParam { + /** + * @generated from protobuf oneof: param + */ + param: { + oneofKind: "waitForTape"; + /** + * @generated from protobuf field: job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; + */ + waitForTape: 
JobRestoreWaitForTapeParam; + } | { + oneofKind: "copying"; + /** + * @generated from protobuf field: job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"]; + */ + copying: JobRestoreCopyingParam; + } | { + oneofKind: "finished"; + /** + * @generated from protobuf field: job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"]; + */ + finished: JobRestoreFinishedParam; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message job_restore.JobRestoreWaitForTapeParam + */ +export interface JobRestoreWaitForTapeParam { +} +/** + * @generated from protobuf message job_restore.JobRestoreCopyingParam + */ +export interface JobRestoreCopyingParam { + /** + * @generated from protobuf field: string device = 1; + */ + device: string; +} +/** + * @generated from protobuf message job_restore.JobRestoreFinishedParam + */ +export interface JobRestoreFinishedParam { +} +/** + * @generated from protobuf message job_restore.FileRestoreState + */ +export interface FileRestoreState { + /** + * @generated from protobuf field: int64 file_id = 1; + */ + fileId: bigint; + /** + * @generated from protobuf field: copy_status.CopyStatus status = 2; + */ + status: CopyStatus; + /** + * @generated from protobuf field: int64 tape_id = 17; + */ + tapeId: bigint; + /** + * @generated from protobuf field: int64 position_id = 18; + */ + positionId: bigint; + /** + * @generated from protobuf field: repeated string path_in_tape = 19; + */ + pathInTape: string[]; +} +/** + * @generated from protobuf message job_restore.JobStateRestore + */ +export interface JobStateRestore { + /** + * @generated from protobuf field: job_restore.JobRestoreStep step = 1; + */ + step: JobRestoreStep; + /** + * @generated from protobuf field: repeated job_restore.FileRestoreState files = 2; + */ + files: FileRestoreState[]; +} +/** + * @generated from protobuf message job_restore.JobDisplayRestore + */ +export interface JobDisplayRestore { + /** + * @generated from protobuf field: int64 copyedBytes = 1; + */ + copyedBytes: bigint; + /** + * @generated from protobuf field: int64 copyedFiles = 2; + */ + copyedFiles: bigint; + /** + * @generated from protobuf field: int64 totalBytes = 3; + */ + totalBytes: bigint; + /** + * @generated from protobuf field: int64 totalFiles = 4; + */ + totalFiles: bigint; + /** + * @generated from protobuf field: bytes logs = 17; + */ + logs: Uint8Array; +} +/** + * @generated from protobuf enum job_restore.JobRestoreStep + */ +export enum JobRestoreStep { + /** + * @generated from protobuf enum value: Pending = 0; + */ + Pending = 0, + /** + * @generated from protobuf enum value: WaitForTape = 1; + */ + WaitForTape = 1, + /** + * @generated from protobuf enum value: Copying = 2; + */ + Copying = 2, + /** + * @generated from protobuf enum value: Finished = 255; + */ + Finished = 255 +} +// @generated message type with reflection information, may provide speed optimized methods +class JobParamRestore$Type extends MessageType { + constructor() { + super("job_restore.JobParamRestore", [ + { no: 1, name: "file_ids", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobParamRestore { + const message = { fileIds: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, 
options: BinaryReadOptions, target?: JobParamRestore): JobParamRestore { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated int64 file_ids */ 1: + if (wireType === WireType.LengthDelimited) + for (let e = reader.int32() + reader.pos; reader.pos < e;) + message.fileIds.push(reader.int64().toBigInt()); + else + message.fileIds.push(reader.int64().toBigInt()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobParamRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated int64 file_ids = 1; */ + if (message.fileIds.length) { + writer.tag(1, WireType.LengthDelimited).fork(); + for (let i = 0; i < message.fileIds.length; i++) + writer.int64(message.fileIds[i]); + writer.join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobParamRestore + */ +export const JobParamRestore = new JobParamRestore$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobRestoreNextParam$Type extends MessageType { + constructor() { + super("job_restore.JobRestoreNextParam", [ + { no: 1, name: "WaitForTape", kind: "message", jsonName: "WaitForTape", oneof: "param", T: () => JobRestoreWaitForTapeParam }, + { no: 2, name: "Copying", kind: "message", jsonName: "Copying", oneof: "param", T: () => JobRestoreCopyingParam }, + { no: 255, name: "Finished", kind: "message", jsonName: "Finished", oneof: "param", T: () => JobRestoreFinishedParam } + ]); + } + create(value?: PartialMessage): JobRestoreNextParam { + const message = { param: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreNextParam): JobRestoreNextParam { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];*/ 1: + message.param = { + oneofKind: "waitForTape", + waitForTape: JobRestoreWaitForTapeParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).waitForTape) + }; + break; + case /* job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"];*/ 2: + message.param = { + oneofKind: "copying", + copying: JobRestoreCopyingParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).copying) + }; + break; + case /* job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"];*/ 255: + message.param = { + oneofKind: "finished", + finished: JobRestoreFinishedParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).finished) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobRestoreNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; */ + if (message.param.oneofKind === "waitForTape") + JobRestoreWaitForTapeParam.internalBinaryWrite(message.param.waitForTape, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"]; */ + if (message.param.oneofKind === "copying") + JobRestoreCopyingParam.internalBinaryWrite(message.param.copying, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"]; */ + if (message.param.oneofKind === "finished") + JobRestoreFinishedParam.internalBinaryWrite(message.param.finished, writer.tag(255, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobRestoreNextParam + */ +export const JobRestoreNextParam = new JobRestoreNextParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobRestoreWaitForTapeParam$Type extends MessageType { + constructor() { + super("job_restore.JobRestoreWaitForTapeParam", []); + } + create(value?: PartialMessage): JobRestoreWaitForTapeParam { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreWaitForTapeParam): JobRestoreWaitForTapeParam { + return target ?? this.create(); + } + internalBinaryWrite(message: JobRestoreWaitForTapeParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobRestoreWaitForTapeParam + */ +export const JobRestoreWaitForTapeParam = new JobRestoreWaitForTapeParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobRestoreCopyingParam$Type extends MessageType { + constructor() { + super("job_restore.JobRestoreCopyingParam", [ + { no: 1, name: "device", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): JobRestoreCopyingParam { + const message = { device: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreCopyingParam): JobRestoreCopyingParam { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string device */ 1: + message.device = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobRestoreCopyingParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string device = 1; */ + if (message.device !== "") + writer.tag(1, WireType.LengthDelimited).string(message.device); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobRestoreCopyingParam + */ +export const JobRestoreCopyingParam = new JobRestoreCopyingParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobRestoreFinishedParam$Type extends MessageType { + constructor() { + super("job_restore.JobRestoreFinishedParam", []); + } + create(value?: PartialMessage): JobRestoreFinishedParam { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreFinishedParam): JobRestoreFinishedParam { + return target ?? this.create(); + } + internalBinaryWrite(message: JobRestoreFinishedParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobRestoreFinishedParam + */ +export const JobRestoreFinishedParam = new JobRestoreFinishedParam$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileRestoreState$Type extends MessageType { + constructor() { + super("job_restore.FileRestoreState", [ + { no: 1, name: "file_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "status", kind: "enum", T: () => ["copy_status.CopyStatus", CopyStatus] }, + { no: 17, name: "tape_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 18, name: "position_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 19, name: "path_in_tape", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): FileRestoreState { + const message = { fileId: 0n, status: 0, tapeId: 0n, positionId: 0n, pathInTape: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileRestoreState): FileRestoreState { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 file_id */ 1: + message.fileId = reader.int64().toBigInt(); + break; + case /* copy_status.CopyStatus status */ 2: + message.status = reader.int32(); + break; + case /* int64 tape_id */ 17: + message.tapeId = reader.int64().toBigInt(); + break; + case /* int64 position_id */ 18: + message.positionId = reader.int64().toBigInt(); + break; + case /* repeated string path_in_tape */ 19: + message.pathInTape.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileRestoreState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 file_id = 1; */ + if (message.fileId !== 0n) + writer.tag(1, WireType.Varint).int64(message.fileId); + /* copy_status.CopyStatus status = 2; */ + if (message.status !== 0) + writer.tag(2, WireType.Varint).int32(message.status); + /* int64 tape_id = 17; */ + if (message.tapeId !== 0n) + writer.tag(17, WireType.Varint).int64(message.tapeId); + /* int64 position_id = 18; */ + if (message.positionId !== 0n) + writer.tag(18, WireType.Varint).int64(message.positionId); + /* repeated string path_in_tape = 19; */ + for (let i = 0; i < message.pathInTape.length; i++) + writer.tag(19, WireType.LengthDelimited).string(message.pathInTape[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.FileRestoreState + */ +export const FileRestoreState = new FileRestoreState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobStateRestore$Type extends MessageType { + constructor() { + super("job_restore.JobStateRestore", [ + { no: 1, name: "step", kind: "enum", T: () => ["job_restore.JobRestoreStep", JobRestoreStep] }, + { no: 2, name: "files", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => FileRestoreState } + ]); + } + create(value?: PartialMessage): JobStateRestore { + const message = { step: 0, files: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobStateRestore): JobStateRestore { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job_restore.JobRestoreStep step */ 1: + message.step = reader.int32(); + break; + case /* repeated job_restore.FileRestoreState files */ 2: + message.files.push(FileRestoreState.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobStateRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job_restore.JobRestoreStep step = 1; */ + if (message.step !== 0) + writer.tag(1, WireType.Varint).int32(message.step); + /* repeated job_restore.FileRestoreState files = 2; */ + for (let i = 0; i < message.files.length; i++) + FileRestoreState.internalBinaryWrite(message.files[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobStateRestore + */ +export const JobStateRestore = new JobStateRestore$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobDisplayRestore$Type extends MessageType { + constructor() { + super("job_restore.JobDisplayRestore", [ + { no: 1, name: "copyedBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "copyedFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "totalBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 4, name: "totalFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 17, name: "logs", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): JobDisplayRestore { + const message = { copyedBytes: 0n, copyedFiles: 0n, totalBytes: 0n, totalFiles: 0n, logs: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayRestore): JobDisplayRestore { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 copyedBytes */ 1: + message.copyedBytes = reader.int64().toBigInt(); + break; + case /* int64 copyedFiles */ 2: + message.copyedFiles = reader.int64().toBigInt(); + break; + case /* int64 totalBytes */ 3: + message.totalBytes = reader.int64().toBigInt(); + break; + case /* int64 totalFiles */ 4: + message.totalFiles = reader.int64().toBigInt(); + break; + case /* bytes logs */ 17: + message.logs = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobDisplayRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 copyedBytes = 1; */ + if (message.copyedBytes !== 0n) + writer.tag(1, WireType.Varint).int64(message.copyedBytes); + /* int64 copyedFiles = 2; */ + if (message.copyedFiles !== 0n) + writer.tag(2, WireType.Varint).int64(message.copyedFiles); + /* int64 totalBytes = 3; */ + if (message.totalBytes !== 0n) + writer.tag(3, WireType.Varint).int64(message.totalBytes); + /* int64 totalFiles = 4; */ + if (message.totalFiles !== 0n) + writer.tag(4, WireType.Varint).int64(message.totalFiles); + /* bytes logs = 17; */ + if (message.logs.length) + writer.tag(17, WireType.LengthDelimited).bytes(message.logs); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message job_restore.JobDisplayRestore + */ +export const JobDisplayRestore = new JobDisplayRestore$Type(); diff --git a/frontend/src/entity/position.ts b/frontend/src/entity/position.ts new file mode 100644 index 0000000..20e3ae4 --- /dev/null +++ b/frontend/src/entity/position.ts @@ -0,0 +1,157 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "position.proto" (package "position", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * @generated from protobuf message position.Position + */ +export interface Position { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: int64 file_id = 2; + */ + fileId: bigint; + /** + * @generated from protobuf field: int64 tape_id = 3; + */ + tapeId: bigint; + /** + * @generated from protobuf field: string path = 4; + */ + path: string; + /** + * @generated from protobuf field: int64 mode = 17; + */ + mode: bigint; + /** + * @generated from protobuf field: int64 mod_time = 18; + */ + modTime: bigint; + /** + * @generated from protobuf field: int64 write_time = 19; + */ + writeTime: bigint; + /** + * @generated from protobuf field: int64 size = 20; + */ + size: bigint; + /** + * @generated from protobuf field: bytes hash = 21; + */ + hash: Uint8Array; +} +// @generated message type with reflection information, may provide speed optimized methods +class Position$Type extends MessageType { + constructor() { + super("position.Position", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "file_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "tape_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 4, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 19, name: "write_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 20, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 21, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): Position { + const message = { id: 0n, fileId: 0n, tapeId: 0n, path: "", mode: 0n, modTime: 0n, writeTime: 0n, size: 0n, hash: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Position): Position { + let message = target 
?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* int64 file_id */ 2: + message.fileId = reader.int64().toBigInt(); + break; + case /* int64 tape_id */ 3: + message.tapeId = reader.int64().toBigInt(); + break; + case /* string path */ 4: + message.path = reader.string(); + break; + case /* int64 mode */ 17: + message.mode = reader.int64().toBigInt(); + break; + case /* int64 mod_time */ 18: + message.modTime = reader.int64().toBigInt(); + break; + case /* int64 write_time */ 19: + message.writeTime = reader.int64().toBigInt(); + break; + case /* int64 size */ 20: + message.size = reader.int64().toBigInt(); + break; + case /* bytes hash */ 21: + message.hash = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Position, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* int64 file_id = 2; */ + if (message.fileId !== 0n) + writer.tag(2, WireType.Varint).int64(message.fileId); + /* int64 tape_id = 3; */ + if (message.tapeId !== 0n) + writer.tag(3, WireType.Varint).int64(message.tapeId); + /* string path = 4; */ + if (message.path !== "") + writer.tag(4, WireType.LengthDelimited).string(message.path); + /* int64 mode = 17; */ + if (message.mode !== 0n) + writer.tag(17, WireType.Varint).int64(message.mode); + /* int64 mod_time = 18; */ + if (message.modTime !== 0n) + writer.tag(18, WireType.Varint).int64(message.modTime); + /* int64 write_time = 19; */ + if (message.writeTime !== 0n) + writer.tag(19, WireType.Varint).int64(message.writeTime); + /* int64 size = 20; */ + if (message.size !== 0n) + writer.tag(20, WireType.Varint).int64(message.size); + /* bytes hash = 21; */ + if (message.hash.length) + writer.tag(21, WireType.LengthDelimited).bytes(message.hash); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message position.Position + */ +export const Position = new Position$Type(); diff --git a/frontend/src/entity/service.client.ts b/frontend/src/entity/service.client.ts new file mode 100644 index 0000000..5e67583 --- /dev/null +++ b/frontend/src/entity/service.client.ts @@ -0,0 +1,193 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "service.proto" (package "service", syntax proto3) +// tslint:disable +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { Service } from "./service"; +import type { DeviceListReply } from "./service"; +import type { DeviceListRequest } from "./service"; +import type { SourceListReply } from "./service"; +import type { SourceListRequest } from "./service"; +import type { JobGetLogReply } from "./service"; +import type { JobGetLogRequest } from "./service"; +import type { JobDisplayReply } from "./service"; +import type { JobDisplayRequest } from "./service"; +import type { JobNextReply } from "./service"; +import type { JobNextRequest } from "./service"; +import type { JobCreateReply } from "./service"; +import type { JobCreateRequest } from "./service"; +import type { JobListReply } from "./service"; +import type { JobListRequest } from "./service"; +import type { TapeMGetReply } from "./service"; +import type { TapeMGetRequest } from "./service"; +import type { FileListParentsReply } from "./service"; +import type { FileListParentsRequest } from "./service"; +import type { FileDeleteReply } from "./service"; +import type { FileDeleteRequest } from "./service"; +import type { FileMkdirReply } from "./service"; +import type { FileMkdirRequest } from "./service"; +import type { FileEditReply } from "./service"; +import type { FileEditRequest } from "./service"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { FileGetReply } from "./service"; +import type { FileGetRequest } from "./service"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * @generated from protobuf service service.Service + */ +export interface IServiceClient { + /** + * @generated from protobuf rpc: FileGet(service.FileGetRequest) returns (service.FileGetReply); + */ + fileGet(input: FileGetRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: FileEdit(service.FileEditRequest) returns (service.FileEditReply); + */ + fileEdit(input: FileEditRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: FileMkdir(service.FileMkdirRequest) returns (service.FileMkdirReply); + */ + fileMkdir(input: FileMkdirRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: FileDelete(service.FileDeleteRequest) returns (service.FileDeleteReply); + */ + fileDelete(input: FileDeleteRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: FileListParents(service.FileListParentsRequest) returns (service.FileListParentsReply); + */ + fileListParents(input: FileListParentsRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: TapeMGet(service.TapeMGetRequest) returns (service.TapeMGetReply); + */ + tapeMGet(input: TapeMGetRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: JobList(service.JobListRequest) returns 
(service.JobListReply); + */ + jobList(input: JobListRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: JobCreate(service.JobCreateRequest) returns (service.JobCreateReply); + */ + jobCreate(input: JobCreateRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: JobNext(service.JobNextRequest) returns (service.JobNextReply); + */ + jobNext(input: JobNextRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: JobDisplay(service.JobDisplayRequest) returns (service.JobDisplayReply); + */ + jobDisplay(input: JobDisplayRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: JobGetLog(service.JobGetLogRequest) returns (service.JobGetLogReply); + */ + jobGetLog(input: JobGetLogRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: SourceList(service.SourceListRequest) returns (service.SourceListReply); + */ + sourceList(input: SourceListRequest, options?: RpcOptions): UnaryCall; + /** + * @generated from protobuf rpc: DeviceList(service.DeviceListRequest) returns (service.DeviceListReply); + */ + deviceList(input: DeviceListRequest, options?: RpcOptions): UnaryCall; +} +/** + * @generated from protobuf service service.Service + */ +export class ServiceClient implements IServiceClient, ServiceInfo { + typeName = Service.typeName; + methods = Service.methods; + options = Service.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * @generated from protobuf rpc: FileGet(service.FileGetRequest) returns (service.FileGetReply); + */ + fileGet(input: FileGetRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: FileEdit(service.FileEditRequest) returns (service.FileEditReply); + */ + fileEdit(input: FileEditRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: FileMkdir(service.FileMkdirRequest) returns (service.FileMkdirReply); + */ + fileMkdir(input: FileMkdirRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[2], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: FileDelete(service.FileDeleteRequest) returns (service.FileDeleteReply); + */ + fileDelete(input: FileDeleteRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[3], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: FileListParents(service.FileListParentsRequest) returns (service.FileListParentsReply); + */ + fileListParents(input: FileListParentsRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[4], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: TapeMGet(service.TapeMGetRequest) returns (service.TapeMGetReply); + */ + tapeMGet(input: TapeMGetRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[5], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, 
input); + } + /** + * @generated from protobuf rpc: JobList(service.JobListRequest) returns (service.JobListReply); + */ + jobList(input: JobListRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[6], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: JobCreate(service.JobCreateRequest) returns (service.JobCreateReply); + */ + jobCreate(input: JobCreateRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[7], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: JobNext(service.JobNextRequest) returns (service.JobNextReply); + */ + jobNext(input: JobNextRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[8], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: JobDisplay(service.JobDisplayRequest) returns (service.JobDisplayReply); + */ + jobDisplay(input: JobDisplayRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[9], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: JobGetLog(service.JobGetLogRequest) returns (service.JobGetLogReply); + */ + jobGetLog(input: JobGetLogRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[10], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: SourceList(service.SourceListRequest) returns (service.SourceListReply); + */ + sourceList(input: SourceListRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[11], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * @generated from protobuf rpc: DeviceList(service.DeviceListRequest) returns (service.DeviceListReply); + */ + deviceList(input: DeviceListRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[12], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} diff --git a/frontend/src/entity/service.ts b/frontend/src/entity/service.ts new file mode 100644 index 0000000..29a1d4e --- /dev/null +++ b/frontend/src/entity/service.ts @@ -0,0 +1,1643 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "service.proto" (package "service", syntax proto3) +// tslint:disable +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { SourceFile } from "./source"; +import { JobDisplay } from "./job"; +import { JobNextParam } from "./job"; +import { CreatableJob } from "./job"; +import { Job } 
from "./job"; +import { JobFilter } from "./job"; +import { Tape } from "./tape"; +import { EditedFile } from "./file"; +import { Position } from "./position"; +import { File } from "./file"; +/** + * @generated from protobuf message service.FileGetRequest + */ +export interface FileGetRequest { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; +} +/** + * @generated from protobuf message service.FileGetReply + */ +export interface FileGetReply { + /** + * @generated from protobuf field: optional file.File file = 1; + */ + file?: File; + /** + * @generated from protobuf field: repeated position.Position positions = 2; + */ + positions: Position[]; + /** + * @generated from protobuf field: repeated file.File children = 17; + */ + children: File[]; +} +/** + * @generated from protobuf message service.FileEditRequest + */ +export interface FileEditRequest { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: file.EditedFile file = 2; + */ + file?: EditedFile; +} +/** + * @generated from protobuf message service.FileEditReply + */ +export interface FileEditReply { + /** + * @generated from protobuf field: file.File file = 1; + */ + file?: File; +} +/** + * @generated from protobuf message service.FileMkdirRequest + */ +export interface FileMkdirRequest { + /** + * @generated from protobuf field: int64 parent_id = 1; + */ + parentId: bigint; + /** + * @generated from protobuf field: string path = 2; + */ + path: string; +} +/** + * @generated from protobuf message service.FileMkdirReply + */ +export interface FileMkdirReply { + /** + * @generated from protobuf field: file.File file = 1; + */ + file?: File; +} +/** + * @generated from protobuf message service.FileDeleteRequest + */ +export interface FileDeleteRequest { + /** + * @generated from protobuf field: repeated int64 ids = 1; + */ + ids: bigint[]; +} +/** + * @generated from protobuf message service.FileDeleteReply + */ +export interface FileDeleteReply { +} +/** + * @generated from protobuf message service.FileListParentsRequest + */ +export interface FileListParentsRequest { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; +} +/** + * @generated from protobuf message service.FileListParentsReply + */ +export interface FileListParentsReply { + /** + * @generated from protobuf field: repeated file.File parents = 1; + */ + parents: File[]; +} +/** + * @generated from protobuf message service.TapeMGetRequest + */ +export interface TapeMGetRequest { + /** + * @generated from protobuf field: repeated int64 ids = 1; + */ + ids: bigint[]; +} +/** + * @generated from protobuf message service.TapeMGetReply + */ +export interface TapeMGetReply { + /** + * @generated from protobuf field: repeated tape.Tape tapes = 1; + */ + tapes: Tape[]; +} +/** + * @generated from protobuf message service.JobListRequest + */ +export interface JobListRequest { + /** + * @generated from protobuf oneof: param + */ + param: { + oneofKind: "mget"; + /** + * @generated from protobuf field: service.JobMGetRequest mget = 1; + */ + mget: JobMGetRequest; + } | { + oneofKind: "list"; + /** + * @generated from protobuf field: job.JobFilter list = 2; + */ + list: JobFilter; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message service.JobMGetRequest + */ +export interface JobMGetRequest { + /** + * @generated from protobuf field: repeated int64 ids = 1; + */ + ids: bigint[]; +} +/** + * @generated from protobuf message 
service.JobListReply + */ +export interface JobListReply { + /** + * @generated from protobuf field: repeated job.Job jobs = 1; + */ + jobs: Job[]; +} +/** + * @generated from protobuf message service.JobCreateRequest + */ +export interface JobCreateRequest { + /** + * @generated from protobuf field: job.CreatableJob job = 1; + */ + job?: CreatableJob; +} +/** + * @generated from protobuf message service.JobCreateReply + */ +export interface JobCreateReply { + /** + * @generated from protobuf field: job.Job job = 1; + */ + job?: Job; +} +/** + * @generated from protobuf message service.JobNextRequest + */ +export interface JobNextRequest { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: job.JobNextParam param = 2; + */ + param?: JobNextParam; +} +/** + * @generated from protobuf message service.JobNextReply + */ +export interface JobNextReply { + /** + * @generated from protobuf field: job.Job job = 1; + */ + job?: Job; +} +/** + * @generated from protobuf message service.JobDisplayRequest + */ +export interface JobDisplayRequest { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; +} +/** + * @generated from protobuf message service.JobDisplayReply + */ +export interface JobDisplayReply { + /** + * @generated from protobuf field: job.JobDisplay display = 1; + */ + display?: JobDisplay; +} +/** + * @generated from protobuf message service.JobGetLogRequest + */ +export interface JobGetLogRequest { + /** + * @generated from protobuf field: int64 job_id = 1; + */ + jobId: bigint; + /** + * @generated from protobuf field: optional int64 offset = 2; + */ + offset?: bigint; +} +/** + * @generated from protobuf message service.JobGetLogReply + */ +export interface JobGetLogReply { + /** + * @generated from protobuf field: bytes logs = 1; + */ + logs: Uint8Array; +} +/** + * @generated from protobuf message service.SourceListRequest + */ +export interface SourceListRequest { + /** + * @generated from protobuf field: string path = 1; + */ + path: string; +} +/** + * @generated from protobuf message service.SourceListReply + */ +export interface SourceListReply { + /** + * @generated from protobuf field: source.SourceFile file = 1; + */ + file?: SourceFile; + /** + * @generated from protobuf field: repeated source.SourceFile chain = 2; + */ + chain: SourceFile[]; + /** + * @generated from protobuf field: repeated source.SourceFile children = 17; + */ + children: SourceFile[]; +} +/** + * @generated from protobuf message service.DeviceListRequest + */ +export interface DeviceListRequest { +} +/** + * @generated from protobuf message service.DeviceListReply + */ +export interface DeviceListReply { + /** + * @generated from protobuf field: repeated string devices = 1; + */ + devices: string[]; +} +// @generated message type with reflection information, may provide speed optimized methods +class FileGetRequest$Type extends MessageType { + constructor() { + super("service.FileGetRequest", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): FileGetRequest { + const message = { id: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileGetRequest): FileGetRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileGetRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileGetRequest + */ +export const FileGetRequest = new FileGetRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileGetReply$Type extends MessageType { + constructor() { + super("service.FileGetReply", [ + { no: 1, name: "file", kind: "message", T: () => File }, + { no: 2, name: "positions", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Position }, + { no: 17, name: "children", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => File } + ]); + } + create(value?: PartialMessage): FileGetReply { + const message = { positions: [], children: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileGetReply): FileGetReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* optional file.File file */ 1: + message.file = File.internalBinaryRead(reader, reader.uint32(), options, message.file); + break; + case /* repeated position.Position positions */ 2: + message.positions.push(Position.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated file.File children */ 17: + message.children.push(File.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileGetReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* optional file.File file = 1; */ + if (message.file) + File.internalBinaryWrite(message.file, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated position.Position positions = 2; */ + for (let i = 0; i < message.positions.length; i++) + Position.internalBinaryWrite(message.positions[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* repeated file.File children = 17; */ + for (let i = 0; i < message.children.length; i++) + File.internalBinaryWrite(message.children[i], writer.tag(17, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileGetReply + */ +export const FileGetReply = new FileGetReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileEditRequest$Type extends MessageType { + constructor() { + super("service.FileEditRequest", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "file", kind: "message", T: () => EditedFile } + ]); + } + create(value?: PartialMessage): FileEditRequest { + const message = { id: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileEditRequest): FileEditRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* file.EditedFile file */ 2: + message.file = EditedFile.internalBinaryRead(reader, reader.uint32(), options, message.file); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileEditRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* file.EditedFile file = 2; */ + if (message.file) + EditedFile.internalBinaryWrite(message.file, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileEditRequest + */ +export const FileEditRequest = new FileEditRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileEditReply$Type extends MessageType { + constructor() { + super("service.FileEditReply", [ + { no: 1, name: "file", kind: "message", T: () => File } + ]); + } + create(value?: PartialMessage): FileEditReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileEditReply): FileEditReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* file.File file */ 1: + message.file = File.internalBinaryRead(reader, reader.uint32(), options, message.file); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileEditReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* file.File file = 1; */ + if (message.file) + File.internalBinaryWrite(message.file, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileEditReply + */ +export const FileEditReply = new FileEditReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileMkdirRequest$Type extends MessageType { + constructor() { + super("service.FileMkdirRequest", [ + { no: 1, name: "parent_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): FileMkdirRequest { + const message = { parentId: 0n, path: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileMkdirRequest): FileMkdirRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 parent_id */ 1: + message.parentId = reader.int64().toBigInt(); + break; + case /* string path */ 2: + message.path = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileMkdirRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 parent_id = 1; */ + if (message.parentId !== 0n) + writer.tag(1, WireType.Varint).int64(message.parentId); + /* string path = 2; */ + if (message.path !== "") + writer.tag(2, WireType.LengthDelimited).string(message.path); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileMkdirRequest + */ +export const FileMkdirRequest = new FileMkdirRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileMkdirReply$Type extends MessageType { + constructor() { + super("service.FileMkdirReply", [ + { no: 1, name: "file", kind: "message", T: () => File } + ]); + } + create(value?: PartialMessage): FileMkdirReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileMkdirReply): FileMkdirReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* file.File file */ 1: + message.file = File.internalBinaryRead(reader, reader.uint32(), options, message.file); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileMkdirReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* file.File file = 1; */ + if (message.file) + File.internalBinaryWrite(message.file, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileMkdirReply + */ +export const FileMkdirReply = new FileMkdirReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileDeleteRequest$Type extends MessageType { + constructor() { + super("service.FileDeleteRequest", [ + { no: 1, name: "ids", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): FileDeleteRequest { + const message = { ids: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileDeleteRequest): FileDeleteRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated int64 ids */ 1: + if (wireType === WireType.LengthDelimited) + for (let e = reader.int32() + reader.pos; reader.pos < e;) + message.ids.push(reader.int64().toBigInt()); + else + message.ids.push(reader.int64().toBigInt()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileDeleteRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated int64 ids = 1; */ + if (message.ids.length) { + writer.tag(1, WireType.LengthDelimited).fork(); + for (let i = 0; i < message.ids.length; i++) + writer.int64(message.ids[i]); + writer.join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileDeleteRequest + */ +export const FileDeleteRequest = new FileDeleteRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileDeleteReply$Type extends MessageType { + constructor() { + super("service.FileDeleteReply", []); + } + create(value?: PartialMessage): FileDeleteReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileDeleteReply): FileDeleteReply { + return target ?? this.create(); + } + internalBinaryWrite(message: FileDeleteReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileDeleteReply + */ +export const FileDeleteReply = new FileDeleteReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileListParentsRequest$Type extends MessageType { + constructor() { + super("service.FileListParentsRequest", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): FileListParentsRequest { + const message = { id: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileListParentsRequest): FileListParentsRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileListParentsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileListParentsRequest + */ +export const FileListParentsRequest = new FileListParentsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FileListParentsReply$Type extends MessageType { + constructor() { + super("service.FileListParentsReply", [ + { no: 1, name: "parents", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => File } + ]); + } + create(value?: PartialMessage): FileListParentsReply { + const message = { parents: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileListParentsReply): FileListParentsReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated file.File parents */ 1: + message.parents.push(File.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FileListParentsReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated file.File parents = 1; */ + for (let i = 0; i < message.parents.length; i++) + File.internalBinaryWrite(message.parents[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.FileListParentsReply + */ +export const FileListParentsReply = new FileListParentsReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TapeMGetRequest$Type extends MessageType { + constructor() { + super("service.TapeMGetRequest", [ + { no: 1, name: "ids", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): TapeMGetRequest { + const message = { ids: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TapeMGetRequest): TapeMGetRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated int64 ids */ 1: + if (wireType === WireType.LengthDelimited) + for (let e = reader.int32() + reader.pos; reader.pos < e;) + message.ids.push(reader.int64().toBigInt()); + else + message.ids.push(reader.int64().toBigInt()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TapeMGetRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated int64 ids = 1; */ + if (message.ids.length) { + writer.tag(1, WireType.LengthDelimited).fork(); + for (let i = 0; i < message.ids.length; i++) + writer.int64(message.ids[i]); + writer.join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.TapeMGetRequest + */ +export const TapeMGetRequest = new TapeMGetRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TapeMGetReply$Type extends MessageType { + constructor() { + super("service.TapeMGetReply", [ + { no: 1, name: "tapes", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Tape } + ]); + } + create(value?: PartialMessage): TapeMGetReply { + const message = { tapes: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TapeMGetReply): TapeMGetReply { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated tape.Tape tapes */ 1: + message.tapes.push(Tape.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TapeMGetReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated tape.Tape tapes = 1; */ + for (let i = 0; i < message.tapes.length; i++) + Tape.internalBinaryWrite(message.tapes[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.TapeMGetReply + */ +export const TapeMGetReply = new TapeMGetReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobListRequest$Type extends MessageType { + constructor() { + super("service.JobListRequest", [ + { no: 1, name: "mget", kind: "message", oneof: "param", T: () => JobMGetRequest }, + { no: 2, name: "list", kind: "message", oneof: "param", T: () => JobFilter } + ]); + } + create(value?: PartialMessage): JobListRequest { + const message = { param: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobListRequest): JobListRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* service.JobMGetRequest mget */ 1: + message.param = { + oneofKind: "mget", + mget: JobMGetRequest.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).mget) + }; + break; + case /* job.JobFilter list */ 2: + message.param = { + oneofKind: "list", + list: JobFilter.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).list) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobListRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* service.JobMGetRequest mget = 1; */ + if (message.param.oneofKind === "mget") + JobMGetRequest.internalBinaryWrite(message.param.mget, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* job.JobFilter list = 2; */ + if (message.param.oneofKind === "list") + JobFilter.internalBinaryWrite(message.param.list, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobListRequest + */ +export const JobListRequest = new JobListRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobMGetRequest$Type extends MessageType { + constructor() { + super("service.JobMGetRequest", [ + { no: 1, name: "ids", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobMGetRequest { + const message = { ids: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobMGetRequest): JobMGetRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated int64 ids */ 1: + if (wireType === WireType.LengthDelimited) + for (let e = reader.int32() + reader.pos; reader.pos < e;) + message.ids.push(reader.int64().toBigInt()); + else + message.ids.push(reader.int64().toBigInt()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobMGetRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated int64 ids = 1; */ + if (message.ids.length) { + writer.tag(1, WireType.LengthDelimited).fork(); + for (let i = 0; i < message.ids.length; i++) + writer.int64(message.ids[i]); + writer.join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobMGetRequest + */ +export const JobMGetRequest = new JobMGetRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobListReply$Type extends MessageType { + constructor() { + super("service.JobListReply", [ + { no: 1, name: "jobs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Job } + ]); + } + create(value?: PartialMessage): JobListReply { + const message = { jobs: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobListReply): JobListReply { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated job.Job jobs */ 1: + message.jobs.push(Job.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobListReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated job.Job jobs = 1; */ + for (let i = 0; i < message.jobs.length; i++) + Job.internalBinaryWrite(message.jobs[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobListReply + */ +export const JobListReply = new JobListReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobCreateRequest$Type extends MessageType { + constructor() { + super("service.JobCreateRequest", [ + { no: 1, name: "job", kind: "message", T: () => CreatableJob } + ]); + } + create(value?: PartialMessage): JobCreateRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobCreateRequest): JobCreateRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job.CreatableJob job */ 1: + message.job = CreatableJob.internalBinaryRead(reader, reader.uint32(), options, message.job); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobCreateRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job.CreatableJob job = 1; */ + if (message.job) + CreatableJob.internalBinaryWrite(message.job, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobCreateRequest + */ +export const JobCreateRequest = new JobCreateRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobCreateReply$Type extends MessageType { + constructor() { + super("service.JobCreateReply", [ + { no: 1, name: "job", kind: "message", T: () => Job } + ]); + } + create(value?: PartialMessage): JobCreateReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobCreateReply): JobCreateReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job.Job job */ 1: + message.job = Job.internalBinaryRead(reader, reader.uint32(), options, message.job); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobCreateReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job.Job job = 1; */ + if (message.job) + Job.internalBinaryWrite(message.job, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobCreateReply + */ +export const JobCreateReply = new JobCreateReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobNextRequest$Type extends MessageType { + constructor() { + super("service.JobNextRequest", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "param", kind: "message", T: () => JobNextParam } + ]); + } + create(value?: PartialMessage): JobNextRequest { + const message = { id: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobNextRequest): JobNextRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* job.JobNextParam param */ 2: + message.param = JobNextParam.internalBinaryRead(reader, reader.uint32(), options, message.param); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobNextRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* job.JobNextParam param = 2; */ + if (message.param) + JobNextParam.internalBinaryWrite(message.param, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobNextRequest + */ +export const JobNextRequest = new JobNextRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobNextReply$Type extends MessageType { + constructor() { + super("service.JobNextReply", [ + { no: 1, name: "job", kind: "message", T: () => Job } + ]); + } + create(value?: PartialMessage): JobNextReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobNextReply): JobNextReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job.Job job */ 1: + message.job = Job.internalBinaryRead(reader, reader.uint32(), options, message.job); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobNextReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job.Job job = 1; */ + if (message.job) + Job.internalBinaryWrite(message.job, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobNextReply + */ +export const JobNextReply = new JobNextReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobDisplayRequest$Type extends MessageType { + constructor() { + super("service.JobDisplayRequest", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobDisplayRequest { + const message = { id: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayRequest): JobDisplayRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobDisplayRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobDisplayRequest + */ +export const JobDisplayRequest = new JobDisplayRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobDisplayReply$Type extends MessageType { + constructor() { + super("service.JobDisplayReply", [ + { no: 1, name: "display", kind: "message", T: () => JobDisplay } + ]); + } + create(value?: PartialMessage): JobDisplayReply { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayReply): JobDisplayReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* job.JobDisplay display */ 1: + message.display = JobDisplay.internalBinaryRead(reader, reader.uint32(), options, message.display); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobDisplayReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* job.JobDisplay display = 1; */ + if (message.display) + JobDisplay.internalBinaryWrite(message.display, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobDisplayReply + */ +export const JobDisplayReply = new JobDisplayReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobGetLogRequest$Type extends MessageType { + constructor() { + super("service.JobGetLogRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "offset", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): JobGetLogRequest { + const message = { jobId: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobGetLogRequest): JobGetLogRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 job_id */ 1: + message.jobId = reader.int64().toBigInt(); + break; + case /* optional int64 offset */ 2: + message.offset = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobGetLogRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 job_id = 1; */ + if (message.jobId !== 0n) + writer.tag(1, WireType.Varint).int64(message.jobId); + /* optional int64 offset = 2; */ + if (message.offset !== undefined) + writer.tag(2, WireType.Varint).int64(message.offset); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobGetLogRequest + */ +export const JobGetLogRequest = new JobGetLogRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobGetLogReply$Type extends MessageType { + constructor() { + super("service.JobGetLogReply", [ + { no: 1, name: "logs", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): JobGetLogReply { + const message = { logs: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobGetLogReply): JobGetLogReply { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes logs */ 1: + message.logs = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobGetLogReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes logs = 1; */ + if (message.logs.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.logs); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.JobGetLogReply + */ +export const JobGetLogReply = new JobGetLogReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class SourceListRequest$Type extends MessageType { + constructor() { + super("service.SourceListRequest", [ + { no: 1, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): SourceListRequest { + const message = { path: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceListRequest): SourceListRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string path */ 1: + message.path = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SourceListRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string path = 1; */ + if (message.path !== "") + writer.tag(1, WireType.LengthDelimited).string(message.path); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.SourceListRequest + */ +export const SourceListRequest = new SourceListRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class SourceListReply$Type extends MessageType { + constructor() { + super("service.SourceListReply", [ + { no: 1, name: "file", kind: "message", T: () => SourceFile }, + { no: 2, name: "chain", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => SourceFile }, + { no: 17, name: "children", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => SourceFile } + ]); + } + create(value?: PartialMessage): SourceListReply { + const message = { chain: [], children: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceListReply): SourceListReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* source.SourceFile file */ 1: + message.file = SourceFile.internalBinaryRead(reader, reader.uint32(), options, message.file); + break; + case /* repeated source.SourceFile chain */ 2: + message.chain.push(SourceFile.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated source.SourceFile children */ 17: + message.children.push(SourceFile.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SourceListReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* source.SourceFile file = 1; */ + if (message.file) + SourceFile.internalBinaryWrite(message.file, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated source.SourceFile chain = 2; */ + for (let i = 0; i < message.chain.length; i++) + SourceFile.internalBinaryWrite(message.chain[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* repeated source.SourceFile children = 17; */ + for (let i = 0; i < message.children.length; i++) + SourceFile.internalBinaryWrite(message.children[i], writer.tag(17, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.SourceListReply + */ +export const SourceListReply = new SourceListReply$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DeviceListRequest$Type extends MessageType { + constructor() { + super("service.DeviceListRequest", []); + } + create(value?: PartialMessage): DeviceListRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeviceListRequest): DeviceListRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: DeviceListRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.DeviceListRequest + */ +export const DeviceListRequest = new DeviceListRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DeviceListReply$Type extends MessageType { + constructor() { + super("service.DeviceListReply", [ + { no: 1, name: "devices", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): DeviceListReply { + const message = { devices: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeviceListReply): DeviceListReply { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated string devices */ 1: + message.devices.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DeviceListReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated string devices = 1; */ + for (let i = 0; i < message.devices.length; i++) + writer.tag(1, WireType.LengthDelimited).string(message.devices[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message service.DeviceListReply + */ +export const DeviceListReply = new DeviceListReply$Type(); +/** + * @generated ServiceType for protobuf service service.Service + */ +export const Service = new ServiceType("service.Service", [ + { name: "FileGet", options: {}, I: FileGetRequest, O: FileGetReply }, + { name: "FileEdit", options: {}, I: FileEditRequest, O: FileEditReply }, + { name: "FileMkdir", options: {}, I: FileMkdirRequest, O: FileMkdirReply }, + { name: "FileDelete", options: {}, I: FileDeleteRequest, O: FileDeleteReply }, + { name: "FileListParents", options: {}, I: FileListParentsRequest, O: FileListParentsReply }, + { name: "TapeMGet", options: {}, I: TapeMGetRequest, O: TapeMGetReply }, + { name: "JobList", options: {}, I: JobListRequest, O: JobListReply }, + { name: "JobCreate", options: {}, I: JobCreateRequest, O: JobCreateReply }, + { name: "JobNext", options: {}, I: JobNextRequest, O: JobNextReply }, + { name: "JobDisplay", options: {}, I: JobDisplayRequest, O: JobDisplayReply }, + { name: "JobGetLog", options: {}, I: JobGetLogRequest, O: JobGetLogReply }, + { name: "SourceList", options: {}, I: SourceListRequest, O: SourceListReply }, + { name: "DeviceList", options: {}, I: DeviceListRequest, O: DeviceListReply } +]); diff --git a/frontend/src/entity/source.ts b/frontend/src/entity/source.ts new file mode 100644 index 0000000..5f7b1dc --- /dev/null +++ b/frontend/src/entity/source.ts @@ -0,0 +1,281 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "source.proto" (package "source", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { CopyStatus } from "./copy_status"; +/** + * @generated from protobuf message source.SourceFile + */ +export interface SourceFile { + /** + * @generated from protobuf field: string path = 1; + */ + path: string; + /** + * @generated from protobuf field: string parent_path = 2; + */ + parentPath: string; + /** + * @generated from protobuf field: string name = 3; + */ + name: string; + /** + * @generated from protobuf field: int64 mode = 17; + */ + mode: bigint; + /** + * @generated from protobuf field: int64 mod_time = 18; + */ + modTime: bigint; + /** + * @generated from protobuf field: int64 size = 19; + */ + size: bigint; +} +/** + * @generated from protobuf message source.Source + */ +export interface Source { + /** + * @generated from protobuf field: string base = 1; + */ + base: string; + /** + * @generated from protobuf field: repeated string path = 2; + */ + path: string[]; +} +/** + * @generated from protobuf message source.SourceState + */ +export interface SourceState { + /** + * @generated from protobuf field: source.Source source = 1; + */ + source?: Source; + /** + * @generated from protobuf field: int64 size = 2; + */ + size: bigint; + /** + * @generated from protobuf field: copy_status.CopyStatus status 
= 3; + */ + status: CopyStatus; + /** + * @generated from protobuf field: optional string message = 4; + */ + message?: string; +} +// @generated message type with reflection information, may provide speed optimized methods +class SourceFile$Type extends MessageType { + constructor() { + super("source.SourceFile", [ + { no: 1, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "parent_path", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 19, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): SourceFile { + const message = { path: "", parentPath: "", name: "", mode: 0n, modTime: 0n, size: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceFile): SourceFile { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string path */ 1: + message.path = reader.string(); + break; + case /* string parent_path */ 2: + message.parentPath = reader.string(); + break; + case /* string name */ 3: + message.name = reader.string(); + break; + case /* int64 mode */ 17: + message.mode = reader.int64().toBigInt(); + break; + case /* int64 mod_time */ 18: + message.modTime = reader.int64().toBigInt(); + break; + case /* int64 size */ 19: + message.size = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SourceFile, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string path = 1; */ + if (message.path !== "") + writer.tag(1, WireType.LengthDelimited).string(message.path); + /* string parent_path = 2; */ + if (message.parentPath !== "") + writer.tag(2, WireType.LengthDelimited).string(message.parentPath); + /* string name = 3; */ + if (message.name !== "") + writer.tag(3, WireType.LengthDelimited).string(message.name); + /* int64 mode = 17; */ + if (message.mode !== 0n) + writer.tag(17, WireType.Varint).int64(message.mode); + /* int64 mod_time = 18; */ + if (message.modTime !== 0n) + writer.tag(18, WireType.Varint).int64(message.modTime); + /* int64 size = 19; */ + if (message.size !== 0n) + writer.tag(19, WireType.Varint).int64(message.size); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message source.SourceFile + */ +export const SourceFile = new SourceFile$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Source$Type extends MessageType { + constructor() { + super("source.Source", [ + { no: 1, name: "base", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "path", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): Source { + const message = { base: "", path: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Source): Source { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string base */ 1: + message.base = reader.string(); + break; + case /* repeated string path */ 2: + message.path.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Source, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string base = 1; */ + if (message.base !== "") + writer.tag(1, WireType.LengthDelimited).string(message.base); + /* repeated string path = 2; */ + for (let i = 0; i < message.path.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.path[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message source.Source + */ +export const Source = new Source$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class SourceState$Type extends MessageType { + constructor() { + super("source.SourceState", [ + { no: 1, name: "source", kind: "message", T: () => Source }, + { no: 2, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "status", kind: "enum", T: () => ["copy_status.CopyStatus", CopyStatus] }, + { no: 4, name: "message", kind: "scalar", opt: true, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): SourceState { + const message = { size: 0n, status: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceState): SourceState { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* source.Source source */ 1: + message.source = Source.internalBinaryRead(reader, reader.uint32(), options, message.source); + break; + case /* int64 size */ 2: + message.size = reader.int64().toBigInt(); + break; + case /* copy_status.CopyStatus status */ 3: + message.status = reader.int32(); + break; + case /* optional string message */ 4: + message.message = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SourceState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* source.Source source = 1; */ + if (message.source) + Source.internalBinaryWrite(message.source, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* int64 size = 2; */ + if (message.size !== 0n) + writer.tag(2, WireType.Varint).int64(message.size); + /* copy_status.CopyStatus status = 3; */ + if (message.status !== 0) + writer.tag(3, WireType.Varint).int32(message.status); + /* optional string message = 4; */ + if (message.message !== undefined) + writer.tag(4, WireType.LengthDelimited).string(message.message); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message source.SourceState + */ +export const SourceState = new SourceState$Type(); diff --git a/frontend/src/entity/tape.ts b/frontend/src/entity/tape.ts new file mode 100644 index 0000000..e0e02c3 --- /dev/null +++ b/frontend/src/entity/tape.ts @@ -0,0 +1,146 @@ +// @generated by protobuf-ts 2.8.2 +// @generated from protobuf file "tape.proto" (package "tape", syntax proto3) +// tslint:disable +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * @generated from protobuf message tape.Tape + */ +export interface Tape { + /** + * @generated from protobuf field: int64 id = 1; + */ + id: bigint; + /** + * @generated from protobuf field: string barcode = 2; + */ + barcode: string; + /** + * @generated from protobuf field: string name = 3; + */ + name: string; + /** + * @generated from protobuf field: string encryption = 4; + */ + encryption: string; + /** + * @generated from protobuf field: int64 create_time = 17; + */ + createTime: bigint; + /** + * @generated from protobuf field: optional int64 destroy_time = 18; + */ + destroyTime?: bigint; + /** + * @generated from protobuf field: int64 capacity_bytes = 19; + */ + capacityBytes: bigint; + /** + * @generated from protobuf field: int64 writen_bytes = 20; + */ + writenBytes: bigint; +} +// @generated 
message type with reflection information, may provide speed optimized methods +class Tape$Type extends MessageType { + constructor() { + super("tape.Tape", [ + { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "barcode", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "encryption", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 17, name: "create_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 18, name: "destroy_time", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 19, name: "capacity_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 20, name: "writen_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): Tape { + const message = { id: 0n, barcode: "", name: "", encryption: "", createTime: 0n, capacityBytes: 0n, writenBytes: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Tape): Tape { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 id */ 1: + message.id = reader.int64().toBigInt(); + break; + case /* string barcode */ 2: + message.barcode = reader.string(); + break; + case /* string name */ 3: + message.name = reader.string(); + break; + case /* string encryption */ 4: + message.encryption = reader.string(); + break; + case /* int64 create_time */ 17: + message.createTime = reader.int64().toBigInt(); + break; + case /* optional int64 destroy_time */ 18: + message.destroyTime = reader.int64().toBigInt(); + break; + case /* int64 capacity_bytes */ 19: + message.capacityBytes = reader.int64().toBigInt(); + break; + case /* int64 writen_bytes */ 20: + message.writenBytes = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Tape, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 id = 1; */ + if (message.id !== 0n) + writer.tag(1, WireType.Varint).int64(message.id); + /* string barcode = 2; */ + if (message.barcode !== "") + writer.tag(2, WireType.LengthDelimited).string(message.barcode); + /* string name = 3; */ + if (message.name !== "") + writer.tag(3, WireType.LengthDelimited).string(message.name); + /* string encryption = 4; */ + if (message.encryption !== "") + writer.tag(4, WireType.LengthDelimited).string(message.encryption); + /* int64 create_time = 17; */ + if (message.createTime !== 0n) + writer.tag(17, WireType.Varint).int64(message.createTime); + /* optional int64 destroy_time = 18; */ + if (message.destroyTime !== undefined) + writer.tag(18, WireType.Varint).int64(message.destroyTime); + /* int64 capacity_bytes = 19; */ + if (message.capacityBytes !== 0n) + writer.tag(19, WireType.Varint).int64(message.capacityBytes); + /* int64 writen_bytes = 20; */ + if (message.writenBytes !== 0n) + writer.tag(20, WireType.Varint).int64(message.writenBytes); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message tape.Tape + */ +export const Tape = new Tape$Type(); diff --git a/frontend/src/file.tsx b/frontend/src/file.tsx new file mode 100644 index 0000000..fb7b20f --- /dev/null +++ b/frontend/src/file.tsx @@ -0,0 +1,184 @@ +import { useState, useRef, useEffect, useMemo, useCallback } from "react"; + +import Grid from "@mui/material/Grid"; +import Box from "@mui/material/Box"; +import { FullFileBrowser, FileBrowserHandle, FileArray } from "chonky"; +import { ChonkyActions, ChonkyFileActionData } from "chonky"; + +import "./app.less"; +import { cli, convertFiles } from "./api"; +import { Root } from "./api"; +import { RenameFileAction, RefreshListAction } from "./actions"; + +import { useDetailModal, DetailModal } from "./detail"; +import { FileGetReply } from "./entity"; + +const useDualSide = () => { + const left = useRef(null); + const right = useRef(null); + const instances = { left, right }; + + const refreshAll = useCallback(async () => { + await Promise.all( + Object.values(instances).map((ref) => { + if (!ref || !ref.current) { + return; + } + return ref.current.requestFileAction(RefreshListAction, {}); + }) + ); + }, [instances]); + + return { instances, refreshAll }; +}; + +const useFileBrowser = (refreshAll: () => Promise, openDetailModel: (detail: FileGetReply) => void) => { + const [files, setFiles] = useState(Array(1).fill(null)); + const [folderChain, setFolderChan] = useState([Root]); + const currentID = useMemo(() => { + if (folderChain.length === 0) { + return "0"; + } + + const last = folderChain.slice(-1)[0]; + if (!last) { + return "0"; + } + + return last.id; + }, [folderChain]); + + const openFolder = useCallback((id: string) => { + (async () => { + const [file, folderChain] = await Promise.all([cli.fileGet({ id: BigInt(id) }).response, cli.fileListParents({ id: BigInt(id) }).response]); + + setFiles(convertFiles(file.children)); + setFolderChan([Root, ...convertFiles(folderChain.parents)]); + })(); + }, []); + useEffect(() => openFolder(Root.id), []); + + const onFileAction = useCallback( + (data: ChonkyFileActionData) => { + // console.log(data); + switch 
(data.id) { + case ChonkyActions.OpenFiles.id: + (async () => { + const { targetFile, files } = data.payload; + + const fileToOpen = targetFile ?? files[0]; + if (!fileToOpen) { + return; + } + + if (fileToOpen.isDir) { + await openFolder(fileToOpen.id); + return; + } + + const file = await cli.fileGet({ id: BigInt(fileToOpen.id) }).response; + await openDetailModel(file); + })(); + + return; + case ChonkyActions.MoveFiles.id: + (async () => { + const { destination, files } = data.payload; + for (const file of files) { + await cli.fileEdit({ + id: BigInt(file.id), + file: { parentId: BigInt(destination.id) }, + }).response; + } + await refreshAll(); + })(); + + return; + case RenameFileAction.id: + (async () => { + const files = data.state.selectedFilesForAction; + if (files.length === 0) { + return; + } + const file = files[0]; + + const name = prompt("Provide new name for this file:", file.name); + if (!name) { + return; + } + + await cli.fileEdit({ id: BigInt(file.id), file: { name } }).response; + await refreshAll(); + })(); + return; + case ChonkyActions.CreateFolder.id: + (async () => { + const name = prompt("Provide the name for your new folder:"); + if (!name) { + return; + } + + await cli.fileMkdir({ parentId: BigInt(currentID), path: name }).response; + await refreshAll(); + })(); + return; + case ChonkyActions.DeleteFiles.id: + (async () => { + const files = data.state.selectedFilesForAction; + const fileids = files.map((file) => BigInt(file.id)); + await cli.fileDelete({ ids: fileids }).response; + await refreshAll(); + })(); + + return; + case RefreshListAction.id: + openFolder(currentID); + return; + } + }, + [openFolder, openDetailModel, refreshAll, currentID] + ); + + const fileActions = useMemo(() => [ChonkyActions.CreateFolder, ChonkyActions.DeleteFiles, ChonkyActions.MoveFiles, RenameFileAction, RefreshListAction], []); + + return { + files, + folderChain, + onFileAction, + fileActions, + defaultFileViewActionId: ChonkyActions.EnableListView.id, + doubleClickDelay: 300, + }; +}; + +export const FileBrowserType = "file"; + +export const FileBrowser = () => { + const { instances, refreshAll } = useDualSide(); + const { detail, openDetailModel, closeDetailModel } = useDetailModal(); + + const leftProps = useFileBrowser(refreshAll, openDetailModel); + const rightProps = useFileBrowser(refreshAll, openDetailModel); + + useEffect(() => { + Object.values(instances).map((inst) => inst.current?.requestFileAction(ChonkyActions.ToggleHiddenFiles, {})); + const interval = setInterval(() => { + Object.values(instances).map((inst) => inst.current && inst.current.requestFileAction(RefreshListAction, {})); + }, 10000); + return () => clearInterval(interval); + }, []); + + return ( + + + + + + + + + + + + ); +}; diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..e174036 --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,41 @@ +:root { + padding: 0; + margin: 0; + height: 100%; + width: 100%; + overflow: hidden; +} + +body { + padding: 0; + margin: 0; + height: 100%; + width: 100%; + + background-color: #ffffff; + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-text-size-adjust: 100%; +} + +#root { + height: 100%; + width: 100%; + margin: 0; + padding: 0; + text-align: center; +} + +/* +:root { + font-family: Inter, Avenir, Helvetica, Arial, sans-serif; + font-size: 16px; + line-height: 24px; + font-weight: 400; + + color-scheme: light dark; + 
color: rgba(255, 255, 255, 0.87); + +} */ diff --git a/frontend/src/init.tsx b/frontend/src/init.tsx new file mode 100644 index 0000000..ea11f47 --- /dev/null +++ b/frontend/src/init.tsx @@ -0,0 +1,31 @@ +import { setChonkyDefaults } from "chonky"; +import { ChonkyIconFA } from "chonky-icon-fontawesome"; + +import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { faPencilAlt } from "@fortawesome/free-solid-svg-icons/faPencilAlt"; + +const ExternalIcons: Record = { + edit: faPencilAlt, +}; + +setChonkyDefaults({ + iconComponent: (props) => { + const icon = ExternalIcons[props.icon] as any; + if (!!icon) { + const faProps = { + ...props, + icon: icon, + } as const; + return ; + } + + return ; + }, +}); + +import { unstable_ClassNameGenerator as ClassNameGenerator } from "@mui/material/className"; + +ClassNameGenerator.configure( + // Do something with the componentName + (componentName: string) => `app-${componentName}` +); diff --git a/frontend/src/jobs.tsx b/frontend/src/jobs.tsx new file mode 100644 index 0000000..3b5da0e --- /dev/null +++ b/frontend/src/jobs.tsx @@ -0,0 +1,471 @@ +import { Fragment, ChangeEvent } from "react"; +import { useState, useRef, useEffect, useMemo, useCallback, FC } from "react"; +import { assert } from "@protobuf-ts/runtime"; + +import Grid from "@mui/material/Grid"; +import Box from "@mui/material/Box"; +import List from "@mui/material/List"; +import ListItemButton from "@mui/material/ListItemButton"; +import ListItemText from "@mui/material/ListItemText"; +import Typography from "@mui/material/Typography"; + +import Card from "@mui/material/Card"; +import CardActions from "@mui/material/CardActions"; +import CardContent from "@mui/material/CardContent"; + +import Button from "@mui/material/Button"; +import TextField from "@mui/material/TextField"; +import MenuItem from "@mui/material/MenuItem"; + +import Dialog from "@mui/material/Dialog"; +import DialogActions from "@mui/material/DialogActions"; +import DialogContent from "@mui/material/DialogContent"; +import DialogContentText from "@mui/material/DialogContentText"; +import DialogTitle from "@mui/material/DialogTitle"; +import LinearProgress from "@mui/material/LinearProgress"; +import Divider from "@mui/material/Divider"; + +import "./app.less"; +import { cli, sleep } from "./api"; +import { Job, JobDisplay, JobCreateRequest, JobListRequest, JobNextRequest, JobStatus, CopyStatus } from "./entity"; +import { JobArchiveCopyingParam, JobArchiveStep, JobDisplayArchive, JobParamArchive, JobStateArchive } from "./entity"; +import { SourceState } from "./entity"; + +import { formatFilesize } from "./tools"; + +export const JobsType = "jobs"; +type DisplayableJob = Job & Partial; + +export const JobsBrowser = () => { + const [jobs, setJobs] = useState([]); + const refresh = useCallback(async () => { + const jobReplys = await cli.jobList(JobListRequest.create({ param: { oneofKind: "list", list: {} } })).response; + const displayReplys = await Promise.all(jobReplys.jobs.map((job) => cli.jobDisplay({ id: job.id }).response)); + const targets = jobReplys.jobs.map((job, idx) => ({ ...job, ...displayReplys[idx].display })); + console.log("refresh jobs list, ", targets); + setJobs(targets); + }, [setJobs]); + useEffect(() => { + refresh(); + const timer = setInterval(refresh, 2000); + return () => { + clearInterval(timer); + }; + }, []); + + return ( + + + + + // Nested List Items + // + // } + > + + + + + {jobs.map((job) => ( + + ))} + + + + ); +}; + +const GetJobCard = ({ job, refresh }: { job: 
DisplayableJob; refresh: () => Promise }): JSX.Element => { + if (!job.state) { + return ; + } + + const type = job.state?.state.oneofKind; + switch (type) { + case "archive": + return ( + + ); + default: + return ; + } +}; + +type ArchiveLastDisplay = { copyedBytes: bigint; lastUpdate: number }; + +const ArchiveCard = ({ + job, + state, + display, + refresh, +}: { + job: Job; + state: JobStateArchive; + display: JobDisplayArchive | null; + refresh: () => Promise; +}): JSX.Element => { + const [lastDisplay, setLastDisplay] = useState(null); + const [speed, setSpeed] = useState(NaN); + const diffDisplay = useCallback( + (current: ArchiveLastDisplay | null) => { + if (!lastDisplay) { + setLastDisplay(current); + return; + } + if (!current) { + setLastDisplay(current); + setSpeed(NaN); + return; + } + + const duration = current.lastUpdate - lastDisplay.lastUpdate; + if (duration) { + const speed = ((Number(current.copyedBytes) - Number(lastDisplay.copyedBytes)) / duration) * 1000; + setSpeed(speed); + } + + setLastDisplay(current); + }, + [lastDisplay, setLastDisplay, setSpeed] + ); + useEffect(() => { + if (!display) { + diffDisplay(null); + return; + } + diffDisplay({ copyedBytes: display.copyedBytes, lastUpdate: Date.now() }); + }, [display]); + + const [fields, progress] = useMemo(() => { + const totalFiles = state.sources.length; + let submitedFiles = 0, + submitedBytes = 0, + totalBytes = 0; + for (const file of state.sources) { + totalBytes += Number(file.size); + if (file.status !== CopyStatus.Submited) { + continue; + } + submitedFiles++; + submitedBytes += Number(file.size); + } + + const copyedFiles = submitedFiles + Number(display?.copyedFiles || 0n); + const copyedBytes = submitedBytes + Number(display?.copyedBytes || 0n); + + const progress = (totalBytes > 0 ? copyedBytes / totalBytes : 1) * 100; + const fields = [ + { name: "Current Step", value: JobArchiveStep[state.step] }, + { name: "Current Speed", value: !Number.isNaN(speed) ? 
`${formatFilesize(speed)}/s` : "--" }, + { name: "Total Files", value: totalFiles }, + { name: "Total Bytes", value: formatFilesize(totalBytes) }, + { name: "Submited Files", value: submitedFiles }, + { name: "Submited Bytes", value: formatFilesize(submitedBytes) }, + { name: "Copyed Files", value: copyedFiles }, + { name: "Copyed Bytes", value: formatFilesize(copyedBytes) }, + ]; + + return [fields, progress]; + }, [state, display, speed]); + + return ( + + + + + + + {fields.map((field, idx) => ( + + + {field.name}: {field.value} + + + ))} + + } + buttons={ + + {state.step === JobArchiveStep.WaitForTape && } + + + + } + /> + ); +}; + +const NewArchiveDialog = ({ refresh }: { refresh: () => Promise }) => { + const [open, setOpen] = useState(false); + const handleClickOpen = () => { + setOpen(true); + }; + const handleClose = () => { + setOpen(false); + }; + + const [source, setSource] = useState(""); + const handleSubmit = async () => { + let path = source.trim(); + if (path.length === 0) { + return; + } + + while (path.endsWith("/")) { + path = path.slice(0, -1); + } + + const splitIdx = path.lastIndexOf("/"); + if (splitIdx < 0) { + return; + } + + console.log(await cli.jobCreate(makeArchiveParam(1n, { sources: [{ base: path.slice(0, splitIdx + 1), path: [path.slice(splitIdx + 1)] }] })).response); + await refresh(); + handleClose(); + }; + + return ( + + + + + {open && ( + + New Archive Job + + ) => setSource(event.target.value)} + /> + + + + + + + )} + + ); +}; + +const LoadTapeDialog = ({ job, refresh }: { job: Job; refresh: () => Promise }) => { + const [devices, setDevices] = useState([]); + const [param, setParam] = useState(null); + const handleClickOpen = async () => { + const reply = await cli.deviceList({}).response; + setDevices(reply.devices); + setParam(JobArchiveCopyingParam.create()); + }; + const handleClose = () => { + setParam(null); + setDevices([]); + }; + const handleChange = (key: keyof JobArchiveCopyingParam) => (event: ChangeEvent) => { + if (param === null) { + return; + } + setParam({ ...param, [key]: event.target.value }); + }; + const handleSubmit = async () => { + if (!param) { + return; + } + + const trimedParam: JobArchiveCopyingParam = { + device: param.device, + barcode: param.barcode.toUpperCase(), + name: param.name, + }; + assert(trimedParam.barcode.length === 6); + + const reply = await cli.jobNext(makeArchiveCopyingParam(job.id, trimedParam)).response; + console.log("job next reply= ", reply); + await refresh(); + handleClose(); + }; + + return ( + + + {param && ( + + Load Tape + + After load tape into tape drive, click 'Submit' + + {devices.map((device) => ( + + {device} + + ))} + + + + + + + + + + )} + + ); +}; + +const ViewLogDialog = ({ jobID }: { jobID: bigint }) => { + const [open, setOpen] = useState(false); + const handleClickOpen = () => { + setOpen(true); + }; + const handleClose = () => { + setOpen(false); + }; + + return ( + + + {open && ( + + View Log + + + + + + + + )} + + ); +}; + +const LogConsole = ({ jobId }: { jobId: bigint }) => { + const [log, setLog] = useState(""); + const refreshLog = useCallback(async () => { + const reply = await cli.jobGetLog({ jobId, offset: BigInt(log.length) }).response; + setLog(log + new TextDecoder().decode(reply.logs)); + }, [log, setLog]); + useEffect(() => { + let closed = false; + (async () => { + while (!closed) { + await refreshLog(); + await sleep(2000); + } + })(); + + return () => { + closed = true; + }; + }, []); + + return
<pre>{log || "loading..."}</pre>
; +}; + +const ArchiveViewFilesDialog = ({ sources }: { sources: SourceState[] }) => { + const [open, setOpen] = useState(false); + const handleClickOpen = () => { + setOpen(true); + }; + const handleClose = () => { + setOpen(false); + }; + + return ( + + + {open && ( + + View Files + + {sources.map((src) => { + if (!src.source) { + return null; + } + return ( + + ); + })} + + + + + + )} + + ); +}; + +const JobCard = ({ job, detail, buttons }: { job: Job; detail?: JSX.Element; buttons?: JSX.Element }) => { + return ( + + + + {`${JobStatus[job.status]}`} + + {`${job.state?.state.oneofKind?.toUpperCase()} Job ${job.id}`} + {detail} + + + {buttons} + + ); +}; + +function makeArchiveParam(priority: bigint, param: JobParamArchive): JobCreateRequest { + return { + job: { + priority, + param: { + param: { + oneofKind: "archive", + archive: param, + }, + }, + }, + }; +} + +function makeArchiveCopyingParam(jobID: bigint, param: JobArchiveCopyingParam): JobNextRequest { + return { + id: jobID, + param: { + param: { + oneofKind: "archive", + archive: { + param: { + oneofKind: "copying", + copying: param, + }, + }, + }, + }, + }; +} diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..f7a9b63 --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,10 @@ +import React from "react"; +import ReactDOM from "react-dom/client"; +import App from "./app"; +import "./index.css"; + +import "./init"; + +ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render( + +); diff --git a/frontend/src/tools.ts b/frontend/src/tools.ts new file mode 100644 index 0000000..5f17f53 --- /dev/null +++ b/frontend/src/tools.ts @@ -0,0 +1,15 @@ +import { filesize } from "filesize"; + +export const hexEncode = (buf: string) => { + var str = ""; + for (var i = 0; i < buf.length; i++) { + str += buf[i].charCodeAt(0).toString(16); + } + return str; +}; + +export const formatFilesize = (size: number | bigint): string => + filesize(size, { + base: 2, + standard: "jedec", + }) as string; diff --git a/frontend/src/vite-env.d.ts b/frontend/src/vite-env.d.ts new file mode 100644 index 0000000..11f02fe --- /dev/null +++ b/frontend/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..18c5596 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "es2020", + "useDefineForClassFields": true, + "lib": ["DOM", "DOM.Iterable", "es2020"], + "allowJs": false, + "skipLibCheck": true, + "esModuleInterop": false, + "allowSyntheticDefaultImports": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "module": "ESNext", + "moduleResolution": "Node", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx" + }, + "include": ["src"], + "references": [{ "path": "./tsconfig.node.json" }] +} diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json new file mode 100644 index 0000000..9d31e2a --- /dev/null +++ b/frontend/tsconfig.node.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "composite": true, + "module": "ESNext", + "moduleResolution": "Node", + "allowSyntheticDefaultImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..d289dd6 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,15 @@ +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; + +// https://vitejs.dev/config/ +export 
default defineConfig({ + plugins: [react()], + build: { + target: "es2020", + }, + optimizeDeps: { + esbuildOptions: { + target: "es2020", + }, + }, +}); diff --git a/go.mod b/go.mod index 7f6af36..734c017 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,15 @@ module github.com/abc950309/tapewriter -go 1.17 +go 1.18 require ( - github.com/benmcclelland/mtio v0.0.0-20170506231306-f929531fb4fe - github.com/benmcclelland/sgio v0.0.0-20180629175614-f710aebf64c1 + github.com/abc950309/acp v0.0.0-20221212144614-c5de5e555428 + github.com/aws/aws-sdk-go v1.44.118 github.com/davecgh/go-spew v1.1.1 - github.com/schollz/progressbar/v3 v3.10.1 + github.com/deckarep/golang-set/v2 v2.1.0 + github.com/gin-contrib/cors v1.4.0 + github.com/gin-gonic/gin v1.8.1 + github.com/google/uuid v1.3.0 github.com/sirupsen/logrus v1.9.0 gorm.io/driver/mysql v1.3.6 gorm.io/driver/sqlite v1.3.6 @@ -14,15 +17,46 @@ require ( ) require ( + github.com/apache/thrift v0.17.0 // indirect + github.com/cenkalti/backoff/v4 v4.1.1 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.14.0 // indirect + github.com/go-playground/universal-translator v0.18.0 // indirect + github.com/go-playground/validator/v10 v10.10.0 // indirect github.com/go-sql-driver/mysql v1.6.0 // indirect + github.com/goccy/go-json v0.9.7 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/now v1.1.5 // indirect - github.com/klauspost/cpuid/v2 v2.0.4 // indirect - github.com/mattn/go-runewidth v0.0.13 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.11.7 // indirect + github.com/klauspost/cpuid/v2 v2.2.2 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/mattn/go-isatty v0.0.16 // indirect + github.com/mattn/go-runewidth v0.0.14 // indirect github.com/mattn/go-sqlite3 v1.14.12 // indirect github.com/minio/sha256-simd v1.0.0 // indirect github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect - github.com/rivo/uniseg v0.3.4 // indirect - golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 // indirect - golang.org/x/term v0.0.0-20220722155259-a9ba230a4035 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.0.1 // indirect + github.com/rivo/uniseg v0.4.3 // indirect + github.com/rs/cors v1.7.0 // indirect + github.com/schollz/progressbar/v3 v3.12.2 // indirect + github.com/ugorji/go/codec v1.2.7 // indirect + golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 // indirect + golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect + golang.org/x/sys v0.3.0 // indirect + golang.org/x/term v0.3.0 // indirect + golang.org/x/text v0.4.0 // indirect + google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506 // indirect + google.golang.org/grpc v1.51.0 // indirect + google.golang.org/protobuf v1.28.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + nhooyr.io/websocket v1.8.6 // indirect ) diff --git a/go.sum b/go.sum index e7a2d8b..843c962 100644 --- a/go.sum +++ b/go.sum @@ -1,52 +1,620 @@ -github.com/benmcclelland/mtio 
v0.0.0-20170506231306-f929531fb4fe h1:f+PTGRJrCYSquf31olVAWIqyJwx42eBzVH4D3igzgSk= -github.com/benmcclelland/mtio v0.0.0-20170506231306-f929531fb4fe/go.mod h1:XyVqnMjuqI1qOvgei81EgX68tV7BjN9JlluJPsjArs0= -github.com/benmcclelland/sgio v0.0.0-20180629175614-f710aebf64c1 h1:f1AIRyf6d21xBd1DirrIa6fk41O3LB0WvVuVqhPN4co= -github.com/benmcclelland/sgio v0.0.0-20180629175614-f710aebf64c1/go.mod h1:WdrapyVn/Aduwwf/OMW6sEtk9+7BSoMst1kGrx4E4xE= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/abc950309/acp v0.0.0-20221007042439-988889e8bde8 h1:1H4Hcvda/3gEXz3VPYprlFRPgKwkzPLyndUTd+YQzfY= +github.com/abc950309/acp v0.0.0-20221007042439-988889e8bde8/go.mod h1:75zVdd0I1kbDxlaDN4gpQHIMzUdJsIx+6yfb/t3XFjU= +github.com/abc950309/acp v0.0.0-20221207115048-9fa93b905b52 h1:5XCs/jWNyPEQ3hHs7/nHsy4JD/gQ24qjoKFtrbdhHhw= +github.com/abc950309/acp v0.0.0-20221207115048-9fa93b905b52/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/abc950309/acp v0.0.0-20221207184804-d09399d928be h1:L09WRCT9Nc14hjQmQjRUU2FglUNRisdcUwb2nT8gwj4= +github.com/abc950309/acp v0.0.0-20221207184804-d09399d928be/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/abc950309/acp v0.0.0-20221208053032-9fde45f6fd43 h1:WSTOZD2ZiUHa4N+o93F5l6C50PJI7D9IFtoDZOWxP14= +github.com/abc950309/acp v0.0.0-20221208053032-9fde45f6fd43/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/abc950309/acp v0.0.0-20221208153837-7d4accc4414a h1:C3xEvAOvyITHoDwrRwivRHAOlmfeOp4VY0sZ89f6mdI= +github.com/abc950309/acp v0.0.0-20221208153837-7d4accc4414a/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/abc950309/acp v0.0.0-20221211170531-ae151264e710 h1:WedtGWyNGXdHZDft0XHfMB1YtMn1D2niEH+AYu25Ivk= +github.com/abc950309/acp v0.0.0-20221211170531-ae151264e710/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/abc950309/acp v0.0.0-20221212144614-c5de5e555428 h1:NQDEsoxNJDxdMuZCJq0R9hqeaR64X8oyEhx0PKUCSwo= +github.com/abc950309/acp v0.0.0-20221212144614-c5de5e555428/go.mod h1:5RsleINAlZ326MJ8fmoCW9IJdnlpa6ZVdHqufsfcQMI= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= 
+github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.17.0 h1:cMd2aj52n+8VoAtvSvLn4kDC3aZ6IAkBuqWQ2IDu7wo= +github.com/apache/thrift v0.17.0/go.mod h1:OLxhMRJxomX+1I/KUw03qoV3mMz16BwaKI+d4fPBx7Q= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.44.118 h1:FJOqIRTukf7+Ulp047/k7JB6eqMXNnj7eb+coORThHQ= +github.com/aws/aws-sdk-go v1.44.118/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1 h1:G2HAfAmvm/GcKan2oOQpBXOd2tT2G57ZnZGWa1PxPBQ= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= 
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deckarep/golang-set/v2 v2.1.0 h1:g47V4Or+DUdzbs8FxCCmgb6VYd+ptPAngjM6dtGktsI= +github.com/deckarep/golang-set/v2 v2.1.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= +github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g= +github.com/gin-contrib/cors v1.4.0/go.mod h1:bs9pNM0x/UsmHPBWT2xZz9ROh8xYjYkiURUfmBoMlcs= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod 
h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= +github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/goccy/go-json v0.9.7 h1:IcB+Aqpx/iMHu5Yooh7jEzJk1JZ7Pjtmys2ukPr7EeM= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf 
v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 
h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= +github.com/improbable-eng/grpc-web v0.15.0/go.mod h1:1sy9HKV4Jt9aEs9JSnkWlRJPuPtwNr0l57L4f878wP8= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jonboulle/clockwork v0.1.0/go.mod 
h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw= -github.com/klauspost/cpuid/v2 v2.0.4 h1:g0I61F2K2DjRHz1cnxlkNSBIaePVoJIjjnHui8QHbiw= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.7 h1:0hzRabrMN4tSTvMfnL3SCv1ZGeAP23ynzodBgaHeMeg= +github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.1.1 h1:t0wUqjowdm8ezddV5k0tLWVklVuvLJpoHeb4WBdydm0= +github.com/klauspost/cpuid/v2 v2.1.1/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= +github.com/klauspost/cpuid/v2 v2.2.2 h1:xPMwiykqNK9VK0NYC3+jTMYv9I6Vl3YdjZgPZKG3zO0= +github.com/klauspost/cpuid/v2 v2.2.2/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod 
h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= +github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0= github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g= github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 
h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/grpc-proxy v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod 
h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.3.4 h1:3Z3Eu6FGHZWSfNKJTOUiPatWwfc7DzJRU04jFUqJODw= -github.com/rivo/uniseg v0.3.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/schollz/progressbar/v3 v3.10.1 h1:6A8v8TIcCJL4yemlUJS9gdcpZ++Gy6toOh1JzKQkz+U= -github.com/schollz/progressbar/v3 v3.10.1/go.mod h1:R2djRgv58sn00AGysc4fN0ip4piOGd3z88K+zVBjczs= +github.com/rivo/uniseg v0.4.2 
h1:YwD0ulJSJytLpiaWua0sBDusfsCZohxjxzVTYjwxfV8= +github.com/rivo/uniseg v0.4.2/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rivo/uniseg v0.4.3 h1:utMvzDsuh3suAEnhH0RdHmoPbU648o6CvXxTx4SBMOw= +github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/schollz/progressbar/v3 v3.11.0 h1:3nIBUF1Zw/pGUaRHP7PZWmARP7ZQbWQ6vL6hwoQiIvU= +github.com/schollz/progressbar/v3 v3.11.0/go.mod h1:R2djRgv58sn00AGysc4fN0ip4piOGd3z88K+zVBjczs= +github.com/schollz/progressbar/v3 v3.12.2 h1:yLqqqpQNMxGxHY8uEshRihaHWwa0rf0yb7/Zrpgq2C0= +github.com/schollz/progressbar/v3 v3.12.2/go.mod h1:HFJYIYQQJX32UJdyoigUl19xoV6aMwZt6iX/C30RWfg= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 h1:v6hYoSR9T5oet+pMXwUWkbiVqx/63mlHjefrHmxwfeY= golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220913175220-63ea55921009 h1:PuvuRMeLWqsf/ZdT1UUZz0syhioyv1mzuFZsXs4fvhw= +golang.org/x/sys v0.0.0-20220913175220-63ea55921009/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20220722155259-a9ba230a4035 h1:Q5284mrmYTpACcm+eAKjKJH48BBwSyfJqmmGDTtT8Vc= golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.3.0 h1:qoo4akIqOcDME5bhc/NgxUdovd6BSS2uMsVjB56q1xI= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod 
h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506 h1:uLBY0yHDCj2PMQ98KWDSIDFwn9zK2zh+tgWtbvPPBjI= +google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U= +google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/check.v1 
v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gorm.io/driver/mysql v1.3.6 h1:BhX1Y/RyALb+T9bZ3t07wLnPZBukt+IRkMn8UZSNbGM= gorm.io/driver/mysql v1.3.6/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c= gorm.io/driver/sqlite v1.3.6 h1:Fi8xNYCUplOqWiPa3/GuCeowRNBRGTf62DEmhMDHeQQ= @@ -54,3 +622,11 @@ gorm.io/driver/sqlite v1.3.6/go.mod h1:Sg1/pvnKtbQ7jLXxfZa+jSHvoX8hoZA8cn4xllOMT gorm.io/gorm v1.23.4/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= gorm.io/gorm v1.23.8 h1:h8sGJ+biDgBA1AD1Ha9gFCx7h8npU7AsLdlkX0n2TpE= gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +nhooyr.io/websocket v1.8.6 h1:s+C3xAMLwGmlI31Nyn/eAehUlZPwfYZu2JXM621Q5/k= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= diff --git a/library/file.go b/library/file.go index 40bb61b..a727650 100644 --- 
a/library/file.go
+++ b/library/file.go
@@ -1,10 +1,475 @@
 package library
-type File struct {
-    ID   int64  `gorm:"primaryKey;autoIncrement"`
-    Path string `gorm:"type:varchar(4096)"`
+import (
+    "context"
+    "encoding/binary"
+    "errors"
+    "fmt"
+    "io/fs"
+    "path"
+    "strings"
+    "time"
-    Name string `gorm:"type:varchar(256)"`
-    Hash []byte `gorm:"type:varbinary(32)"` // sha256
-    Size int64
+    mapset "github.com/deckarep/golang-set/v2"
+    "github.com/sirupsen/logrus"
+    "gorm.io/gorm"
+)
+
+var (
+    ModelFile         = new(File)
+    SignatureV1Header = []byte{0x01}
+
+    ErrFileNotFound          = fmt.Errorf("get file: file not found")
+    ErrMkdirNonDirFileExists = fmt.Errorf("mkdir: non dir exists")
+    ErrMkdirDirExists        = fmt.Errorf("mkdir: dir exists")
+    ErrNewFileFileExists     = fmt.Errorf("new file: file exists")
+
+    Root = &File{ID: 0}
+)
+
+type File struct {
+    ID       int64 `gorm:"primaryKey;autoIncrement" json:"id,omitempty"`
+    ParentID int64 `gorm:"index:idx_parent_name,unique" json:"parent_id,omitempty"`
+
+    Name    string    `gorm:"type:varchar(256);index:idx_parent_name,unique" json:"name,omitempty"`
+    Mode    uint32    `json:"mode,omitempty"`
+    ModTime time.Time `json:"mod_time,omitempty"`
+    Hash    []byte    `gorm:"type:varbinary(32)" json:"hash,omitempty"` // sha256
+    Size    int64     `json:"size,omitempty"`
+
+    Signature []byte `gorm:"type:varbinary(256);index:idx_signature" json:"signature,omitempty"` // sha256 + size
+}
+
+func (l *Library) MkdirAll(ctx context.Context, parentID int64, name string, perm fs.FileMode) (*File, error) {
+    return l.mkdirAll(ctx, l.db.WithContext(ctx), parentID, name, perm)
+}
+
+func (l *Library) mkdirAll(ctx context.Context, tx *gorm.DB, parentID int64, name string, perm fs.FileMode) (*File, error) {
+    name = path.Clean(strings.TrimSpace(name))
+    if strings.ContainsAny(name, "\\") || name == "" {
+        return nil, fmt.Errorf("unexpected mkdir path, '%s'", name)
+    }
+
+    current := Root
+    if parentID != 0 {
+        f, err := l.getFile(ctx, tx, parentID)
+        if err != nil {
+            return nil, err
+        }
+        current = f
+    }
+
+    parts := strings.Split(name, "/")
+    for _, part := range parts {
+        part = strings.TrimSpace(part)
+        if part == "" {
+            continue
+        }
+
+        next, err := l.mkdir(ctx, tx, current.ID, part, perm)
+        if err != nil && !errors.Is(err, ErrMkdirDirExists) {
+            return nil, fmt.Errorf("mkdir fail, %w", err)
+        }
+
+        current = next
+    }
+
+    return current, nil
+}
+
+func (l *Library) mkdir(ctx context.Context, tx *gorm.DB, parentID int64, name string, perm fs.FileMode) (*File, error) {
+    perm = fs.ModePerm & perm
+
+    origin := new(File)
+    if r := tx.Where("parent_id = ? AND name = ?", parentID, name).Find(origin); r.Error != nil {
+        return nil, fmt.Errorf("mkdir: find origin fail, err= %w", r.Error)
+    }
+    if origin.ID != 0 {
+        if fs.FileMode(origin.Mode).IsDir() {
+            return origin, ErrMkdirDirExists
+        }
+        return nil, ErrMkdirNonDirFileExists
+    }
+
+    dir := &File{
+        ParentID: parentID,
+        Name:     name,
+        Mode:     uint32(fs.ModeDir | perm),
+        ModTime:  time.Now(),
+    }
+    if r := tx.Create(dir); r.Error != nil {
+        return nil, fmt.Errorf("create fail, err= %w", r.Error)
+    }
+
+    return dir, nil
+}
+
+func (l *Library) GetFile(ctx context.Context, id int64) (*File, error) {
+    return l.getFile(ctx, l.db.WithContext(ctx), id)
+}
+
+func (l *Library) getFile(ctx context.Context, tx *gorm.DB, id int64) (*File, error) {
+    files, err := l.mGetFile(ctx, tx, id)
+    if err != nil {
+        return nil, err
+    }
+
+    f, ok := files[id]
+    if !ok || f == nil {
+        return nil, ErrFileNotFound
+    }
+
+    return f, nil
+}
+
+func (l *Library) SaveFile(ctx context.Context, file *File) error {
+    return l.db.WithContext(ctx).Save(file).Error
+}
+
+func (l *Library) MoveFile(ctx context.Context, file *File) error {
+    return l.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
+        return l.moveFile(ctx, tx, file)
+    })
+}
+
+func (l *Library) moveFile(ctx context.Context, tx *gorm.DB, file *File) error {
+    origin, err := l.getByName(ctx, tx, file.ParentID, file.Name)
+    if err != nil {
+        return err
+    }
+    if origin == nil {
+        return tx.Save(file).Error
+    }
+
+    if !fs.FileMode(origin.Mode).IsDir() {
+        return fmt.Errorf("same name file exists, name= '%s'", file.Name)
+    }
+    if !fs.FileMode(file.Mode).IsDir() {
+        return fmt.Errorf("same name file is a dir, name= '%s'", file.Name)
+    }
+
+    children, err := l.list(ctx, tx, file.ID)
+    if err != nil {
+        return err
+    }
+    for _, child := range children {
+        child.ParentID = origin.ID
+        if err := l.moveFile(ctx, tx, child); err != nil {
+            return err
+        }
+    }
+
+    if file.ModTime.After(origin.ModTime) {
+        origin.ModTime = file.ModTime
+        if err := tx.Save(origin).Error; err != nil {
+            return err
+        }
+    }
+    if err := tx.Delete(file).Error; err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func (l *Library) Delete(ctx context.Context, ids []int64) error {
+    files, err := l.MGetFile(ctx, ids...)
+    if err != nil {
+        panic(err)
+    }
+
+    moveToTrash := make([]*File, 0, len(files))
+outter:
+    for _, file := range files {
+        if file.ID == TrashFileID {
+            continue
+        }
+        parents, err := l.ListParents(ctx, file.ID)
+        if err != nil {
+            panic(err)
+        }
+        if len(parents) == 0 {
+            moveToTrash = append(moveToTrash, file)
+            continue
+        }
+        if parents[0].ID != TrashFileID {
+            moveToTrash = append(moveToTrash, file)
+            continue
+        }
+        if !fs.FileMode(file.Mode).IsDir() {
+            continue
+        }
+
+        needDelete := make([]*File, 0, 8)
+        current := []*File{file}
+        for len(current) > 0 {
+            next := make([]*File, 0, 8)
+            for _, file := range current {
+                children, err := l.List(ctx, file.ID)
+                if err != nil {
+                    return err
+                }
+                for _, child := range children {
+                    if !fs.FileMode(child.Mode).IsDir() {
+                        continue outter
+                    }
+                }
+                next = append(next, children...)
+            }
+
+            needDelete = append(needDelete, current...)
+            current = next
+        }
+
+        if err := l.db.WithContext(ctx).Delete(needDelete).Error; err != nil {
+            return err
+        }
+    }
+    if len(moveToTrash) == 0 {
+        return nil
+    }
+
+    trash, err := l.newTrash(ctx, l.db.WithContext(ctx))
+    if err != nil {
+        return err
+    }
+
+    for _, file := range moveToTrash {
+        file.ParentID = trash.ID
+        if err := l.MoveFile(ctx, file); err != nil {
+            return err
+        }
+    }
+
+    return nil
+}
+
+const (
+    TrashFileID = -1
+)
+
+func (l *Library) newTrash(ctx context.Context, tx *gorm.DB) (*File, error) {
+    now := time.Now()
+    trash := &File{
+        ID:      TrashFileID,
+        Name:    ".Trash",
+        Mode:    uint32(fs.ModePerm | fs.ModeDir),
+        ModTime: now,
+    }
+    if err := tx.Save(trash).Error; err != nil {
+        return nil, err
+    }
+
+    return l.mkdir(ctx, tx, trash.ID, now.Format(time.RFC3339), fs.ModePerm)
+}
+
+func (l *Library) MGetFile(ctx context.Context, ids ...int64) (map[int64]*File, error) {
+    return l.mGetFile(ctx, l.db.WithContext(ctx), ids...)
+}
+
+func (l *Library) mGetFile(ctx context.Context, tx *gorm.DB, ids ...int64) (map[int64]*File, error) {
+    if len(ids) == 0 {
+        return map[int64]*File{}, nil
+    }
+
+    files := make([]*File, 0, len(ids))
+    if r := tx.Where("id IN (?)", ids).Find(&files); r.Error != nil {
+        return nil, fmt.Errorf("find files fail, %w", r.Error)
+    }
+
+    results := make(map[int64]*File, len(files))
+    for _, f := range files {
+        results[f.ID] = f
+    }
+
+    return results, nil
+}
+
+func (l *Library) GetByPath(ctx context.Context, parentID int64, name string) (*File, error) {
+    name = path.Clean(strings.TrimSpace(name))
+    if strings.ContainsAny(name, "\\") || name == "" {
+        return nil, fmt.Errorf("unexpected path, '%s'", name)
+    }
+
+    current := Root
+    if parentID != 0 {
+        f, err := l.GetFile(ctx, parentID)
+        if err != nil {
+            return nil, err
+        }
+        current = f
+    }
+
+    parts := strings.Split(name, "/")
+    for _, part := range parts {
+        part = strings.TrimSpace(part)
+        if part == "" {
+            continue
+        }
+
+        next, err := l.GetByName(ctx, current.ID, part)
+        if err != nil {
+            return nil, fmt.Errorf("get by path fail, %w", err)
+        }
+        if next == nil {
+            return nil, nil
+        }
+
+        current = next
+    }
+
+    return current, nil
+}
+
+func (l *Library) GetByName(ctx context.Context, parentID int64, name string) (*File, error) {
+    return l.getByName(ctx, l.db.WithContext(ctx), parentID, name)
+}
+
+func (l *Library) getByName(ctx context.Context, tx *gorm.DB, parentID int64, name string) (*File, error) {
+    file := new(File)
+    if r := tx.Where("parent_id = ?
AND name = ?", parentID, name).Find(file); r.Error != nil { + return nil, fmt.Errorf("find files fail, %w", r.Error) + } + if file.ID == 0 { + return nil, nil + } + return file, nil +} + +func (l *Library) List(ctx context.Context, parentID int64) ([]*File, error) { + return l.list(ctx, l.db.WithContext(ctx), parentID) +} + +func (l *Library) list(ctx context.Context, tx *gorm.DB, parentID int64) ([]*File, error) { + files := make([]*File, 0, 4) + if r := tx.Where("parent_id = ?", parentID).Order("name").Find(&files); r.Error != nil { + return nil, fmt.Errorf("find files fail, %w", r.Error) + } + return files, nil +} + +func (l *Library) ListParents(ctx context.Context, id int64) ([]*File, error) { + return l.listParnets(ctx, l.db.WithContext(ctx), id) +} + +func (l *Library) listParnets(ctx context.Context, tx *gorm.DB, id int64) ([]*File, error) { + result := make([]*File, 0, 3) + + currentID := id + for i := 0; i < 32 && currentID != 0; i++ { + file, err := l.getFile(ctx, tx, currentID) + if err != nil { + return nil, err + } + + result = append(result, file) + currentID = file.ParentID + } + + num := len(result) + if num <= 1 { + return result, nil + } + for i := 0; i < num/2; i++ { + result[i], result[num-i-1] = result[num-i-1], result[i] + } + + return result, nil +} + +func (l *Library) Search(ctx context.Context, name string) ([]*File, error) { + files := make([]*File, 0, 4) + if r := l.db.WithContext(ctx).Where("name LIKE ?", fmt.Sprintf("%"+name+"%")).Order("name").Limit(100).Find(&files); r.Error != nil { + return nil, fmt.Errorf("find files fail, %w", r.Error) + } + return files, nil +} + +func (l *Library) TrimFiles(ctx context.Context) error { + for { + positions := make([]*Position, 0, batchSize) + if r := l.db.WithContext(ctx).Where("file_id = ?", 0).Limit(batchSize).Find(&positions); r.Error != nil { + return fmt.Errorf("list non file position fail, err= %w", r.Error) + } + if len(positions) == 0 { + return nil + } + + signatures := make([][]byte, 0, len(positions)) + sign2positions := make(map[string]*Position, len(positions)) + for _, posi := range positions { + size := make([]byte, 8) + binary.BigEndian.PutUint64(size, uint64(posi.Size)) + + sign := make([]byte, 0, 64) + sign = append(sign, SignatureV1Header...) + sign = append(sign, posi.Hash...) + sign = append(sign, size...) + + signatures = append(signatures, sign) + sign2positions[string(sign)] = posi + } + + matched := make([]*File, 0, 4) + if r := l.db.WithContext(ctx).Where("signature IN (?)", signatures).Find(&matched); r.Error != nil { + return fmt.Errorf("get matched file fail, err= %w", r.Error) + } + + for _, file := range matched { + posi, has := sign2positions[string(file.Signature)] + if !has { + continue + } + + posi.FileID = file.ID + l.db.WithContext(ctx).Save(posi) + + delete(sign2positions, string(file.Signature)) + } + + tapeIDs := mapset.NewThreadUnsafeSet[int64]() + for _, posi := range sign2positions { + tapeIDs.Add(posi.TapeID) + } + + tapes, err := l.MGetTape(ctx, tapeIDs.ToSlice()...) 
+ if err != nil { + return fmt.Errorf("mget tape, ids= %v, %w", tapeIDs.ToSlice(), err) + } + + for sign, posi := range sign2positions { + tape := tapes[posi.TapeID] + if tape == nil { + logrus.WithContext(ctx).Warnf("trim file, tape not found, tape_id= %d", posi.TapeID) + continue + } + + dirname, filename := path.Split(fmt.Sprintf("Unforged/%s/%s", tape.Barcode, posi.Path)) + dir, err := l.MkdirAll(ctx, Root.ID, dirname, 0x777) + if err != nil { + return fmt.Errorf("mkdir, %w", err) + } + + origin := new(File) + if r := l.db.WithContext(ctx).Where("parent_id = ? AND name = ?", dir.ID, filename).Find(origin); r.Error != nil { + return fmt.Errorf("new file: find origin fail, err= %w", r.Error) + } + if origin.ID != 0 { + return ErrNewFileFileExists + } + + file := &File{ + ParentID: dir.ID, + Name: filename, + Mode: posi.Mode, + ModTime: time.Now(), + Hash: posi.Hash, + Size: posi.Size, + Signature: []byte(sign), + } + if r := l.db.WithContext(ctx).Create(file); r.Error != nil { + return fmt.Errorf("new file: create fail, err= %w", r.Error) + } + } + } } diff --git a/library/library.go b/library/library.go index fd5f99f..7d42959 100644 --- a/library/library.go +++ b/library/library.go @@ -4,11 +4,18 @@ import ( "gorm.io/gorm" ) +const ( + batchSize = 100 +) + type Library struct { - db *gorm.DB - prefix string + db *gorm.DB } -func NewLibrary(db *gorm.DB, prefix string) *Library { - return &Library{db: db, prefix: prefix} +func New(db *gorm.DB) *Library { + return &Library{db: db} +} + +func (l *Library) AutoMigrate() error { + return l.db.AutoMigrate(ModelFile, ModelPosition, ModelTape) } diff --git a/library/position.go b/library/position.go index 0ffd9ec..daf6ef1 100644 --- a/library/position.go +++ b/library/position.go @@ -1,27 +1,52 @@ package library import ( + "context" + "fmt" "time" "gorm.io/gorm" ) +var ( + ModelPosition = new(Position) +) + type Position struct { - ID int64 `gorm:"primaryKey;autoIncrement"` - FileID int64 - TapeID int64 - Path string `gorm:"type:varchar(4096)"` + ID int64 `gorm:"primaryKey;autoIncrement" json:"id,omitempty"` + FileID int64 `gorm:"index:idx_file_id" json:"file_id,omitempty"` + TapeID int64 `gorm:"index:idx_tape_path" json:"tape_id,omitempty"` + Path string `gorm:"type:varchar(4096);index:idx_tape_path" json:"path,omitempty"` - Mode uint32 - ModTime time.Time - WriteTime time.Time - Size int64 - Hash []byte `gorm:"type:varbinary(32)"` // sha256 + Mode uint32 `json:"mode,omitempty"` + ModTime time.Time `json:"mod_time,omitempty"` + WriteTime time.Time `json:"write_time,omitempty"` + Size int64 `json:"size,omitempty"` + Hash []byte `gorm:"type:varbinary(32)" json:"hash,omitempty"` // sha256 } -func (l *Library) PositionScope(db *gorm.DB) *gorm.DB { - if l.prefix == "" { - return db +func (l *Library) GetPositionByFileID(ctx context.Context, fileID int64) ([]*Position, error) { + results, err := l.MGetPositionByFileID(ctx, l.db.WithContext(ctx), fileID) + if err != nil { + panic(err) } - return db.Table(l.prefix + "_position") + return results[fileID], nil +} + +func (l *Library) MGetPositionByFileID(ctx context.Context, tx *gorm.DB, fileIDs ...int64) (map[int64][]*Position, error) { + if len(fileIDs) == 0 { + return map[int64][]*Position{}, nil + } + + positions := make([]*Position, 0, len(fileIDs)) + if r := tx.Where("file_id IN (?)", fileIDs).Find(&positions); r.Error != nil { + return nil, fmt.Errorf("find position by file id fail, %w", r.Error) + } + + results := make(map[int64][]*Position, len(positions)) + for _, posi := range positions { + 
results[posi.FileID] = append(results[posi.FileID], posi) + } + + return results, nil } diff --git a/library/tape.go b/library/tape.go index 26952fa..0fa86ae 100644 --- a/library/tape.go +++ b/library/tape.go @@ -1,26 +1,25 @@ package library import ( + "context" + "fmt" "os" "time" +) - "gorm.io/gorm" +var ( + ModelTape = new(Tape) ) type Tape struct { - ID int64 `gorm:"primaryKey;autoIncrement"` - Barcode string - Name string - Encryption string - CreateTimestamp int64 - DestroyTimestamp int64 -} - -func (l *Library) TapeScope(db *gorm.DB) *gorm.DB { - if l.prefix == "" { - return db - } - return db.Table(l.prefix + "_tape") + ID int64 `gorm:"primaryKey;autoIncrement" json:"id,omitempty"` + Barcode string `gorm:"type:varchar(15);index:idx_barcode,unique" json:"barcode,omitempty"` + Name string `gorm:"type:varchar(256)" json:"name,omitempty"` + Encryption string `gorm:"type:varchar(2048)" json:"encryption,omitempty"` + CreateTime time.Time `json:"create_time,omitempty"` + DestroyTime *time.Time `json:"destroy_time,omitempty"` + CapacityBytes int64 `json:"capacity_bytes,omitempty"` + WritenBytes int64 `json:"writen_bytes,omitempty"` } type TapeFile struct { @@ -32,14 +31,85 @@ type TapeFile struct { Hash []byte `json:"hash"` // sha256 } -// func (l *Library) SaveTape(ctx context.Context, tape *Tape, files []*TapeFile) (*Tape, error) { -// if r := l.db.WithContext(ctx).Scopes(l.TapeScope).Save(tape); r.Error != nil { -// return nil, fmt.Errorf("save tape fail, err= %w", r.Error) -// } +func (l *Library) CreateTape(ctx context.Context, tape *Tape, files []*TapeFile) (*Tape, error) { + tape.WritenBytes = 0 + for _, file := range files { + tape.WritenBytes += file.Size + } + if tape.CapacityBytes == 0 { + tape.CapacityBytes = tape.WritenBytes + } -// positions := make([]*Position, 0, len(files)) -// for _, file := range files { + if r := l.db.WithContext(ctx).Save(tape); r.Error != nil { + return nil, fmt.Errorf("save tape fail, err= %w", r.Error) + } -// } -// l.db.WithContext(ctx).Scopes(l.PositionScope).CreateBatchSize() -// } + positions := make([]*Position, 0, len(files)) + for _, file := range files { + positions = append(positions, &Position{ + TapeID: tape.ID, + Path: file.Path, + Mode: uint32(file.Mode), + ModTime: file.ModTime, + WriteTime: file.WriteTime, + Size: file.Size, + Hash: file.Hash, + }) + } + + if r := l.db.WithContext(ctx).CreateInBatches(positions, batchSize); r.Error != nil { + return nil, fmt.Errorf("save tape position fail, %w", r.Error) + } + + return tape, nil +} + +func (l *Library) GetTape(ctx context.Context, id int64) (*Tape, error) { + tapes, err := l.MGetTape(ctx, id) + if err != nil { + return nil, err + } + + tape, ok := tapes[id] + if !ok || tape == nil { + return nil, ErrFileNotFound + } + + return tape, nil +} + +func (l *Library) MGetTape(ctx context.Context, tapeIDs ...int64) (map[int64]*Tape, error) { + if len(tapeIDs) == 0 { + return map[int64]*Tape{}, nil + } + + tapes := make([]*Tape, 0, len(tapeIDs)) + if r := l.db.WithContext(ctx).Where("id IN (?)", tapeIDs).Find(&tapes); r.Error != nil { + return nil, fmt.Errorf("mget tapes fail, err= %w", r.Error) + } + + result := make(map[int64]*Tape, len(tapes)) + for _, tape := range tapes { + result[tape.ID] = tape + } + + return result, nil +} + +func (l *Library) MGetTapeByBarcode(ctx context.Context, barcodes ...string) (map[string]*Tape, error) { + if len(barcodes) == 0 { + return map[string]*Tape{}, nil + } + + tapes := make([]*Tape, 0, len(barcodes)) + if r := l.db.WithContext(ctx).Where("barcode IN 
(?)", barcodes).Find(&tapes); r.Error != nil { + return nil, fmt.Errorf("mget tapes by barcode fail, err= %w", r.Error) + } + + result := make(map[string]*Tape, len(tapes)) + for _, tape := range tapes { + result[tape.Barcode] = tape + } + + return result, nil +} diff --git a/maketape b/maketape deleted file mode 100755 index dfb6683..0000000 --- a/maketape +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash -set -ex; - -echo "format tape as number '$1', name '$2'" -echo "copy '$3' to tape" - -stenc -f /dev/st0 -e on -k /root/tape.key -a 1 --ckod -sleep 3 -mkltfs -f -d /dev/st0 -s $1 -n $2 -sleep 3 -ltfs -o noatime -o sync_type=unmount -o work_directory=/opt/ltfs -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject /ltfs -sleep 3 -ordercp $3 /ltfs/ -sleep 3 -umount /ltfs diff --git a/mmap/manual_test_program.go b/mmap/manual_test_program.go deleted file mode 100644 index a1ab17b..0000000 --- a/mmap/manual_test_program.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build ignore -// +build ignore - -// -// This build tag means that "go build" does not build this file. Use "go run -// manual_test_program.go" to run it. -// -// You will also need to change "debug = false" to "debug = true" in mmap_*.go. - -package main - -import ( - "log" - "math/rand" - "time" - - "golang.org/x/exp/mmap" -) - -var garbage []byte - -func main() { - const filename = "manual_test_program.go" - - for _, explicitClose := range []bool{false, true} { - r, err := mmap.Open(filename) - if err != nil { - log.Fatalf("Open: %v", err) - } - if explicitClose { - r.Close() - } else { - // Leak the *mmap.ReaderAt returned by mmap.Open. The finalizer - // should pick it up, if finalizers run at all. - } - } - - println("Finished all explicit Close calls.") - println("Creating and collecting garbage.") - println("Look for two munmap log messages.") - println("Hit Ctrl-C to exit.") - - rng := rand.New(rand.NewSource(1)) - now := time.Now() - for { - garbage = make([]byte, rng.Intn(1<<20)) - if time.Since(now) > 1*time.Second { - now = time.Now() - print(".") - } - } -} diff --git a/mmap/mmap_darwin.go b/mmap/mmap_darwin.go deleted file mode 100644 index 1882efb..0000000 --- a/mmap/mmap_darwin.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build darwin -// +build darwin - -// Package mmap provides a way to memory-map a file. -package mmap - -import ( - "errors" - "fmt" - "io" - "os" - "runtime" - "syscall" -) - -// debug is whether to print debugging messages for manual testing. -// -// The runtime.SetFinalizer documentation says that, "The finalizer for x is -// scheduled to run at some arbitrary time after x becomes unreachable. There -// is no guarantee that finalizers will run before a program exits", so we -// cannot automatically test that the finalizer runs. Instead, set this to true -// when running the manual test. -const debug = false - -// ReaderAt reads a memory-mapped file. -// -// Like any io.ReaderAt, clients can execute parallel ReadAt calls, but it is -// not safe to call Close and reading methods concurrently. -type ReaderAt struct { - data []byte -} - -// Close closes the reader. 
-func (r *ReaderAt) Close() error { - if r.data == nil { - return nil - } - data := r.data - r.data = nil - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("munmap", r, p) - } - runtime.SetFinalizer(r, nil) - return syscall.Munmap(data) -} - -// Len returns the length of the underlying memory-mapped file. -func (r *ReaderAt) Len() int { - return len(r.data) -} - -// At returns the byte at index i. -func (r *ReaderAt) At(i int) byte { - return r.data[i] -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) ReadAt(p []byte, off int64) (int, error) { - if r.data == nil { - return 0, errors.New("mmap: closed") - } - if off < 0 || int64(len(r.data)) < off { - return 0, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - n := copy(p, r.data[off:]) - if n < len(p) { - return n, io.EOF - } - return n, nil -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) Slice(off, limit int64) ([]byte, error) { - if r.data == nil { - return nil, errors.New("mmap: closed") - } - - l := int64(len(r.data)) - if off < 0 || limit < 0 || l < off { - return nil, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - - if off+limit > l { - return r.data[off:], nil - } - - return r.data[off : off+limit], nil -} - -// Open memory-maps the named file for reading. -func Open(filename string) (*ReaderAt, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - fi, err := f.Stat() - if err != nil { - return nil, err - } - - size := fi.Size() - if size == 0 { - return &ReaderAt{}, nil - } - if size < 0 { - return nil, fmt.Errorf("mmap: file %q has negative size", filename) - } - if size != int64(int(size)) { - return nil, fmt.Errorf("mmap: file %q is too large", filename) - } - - data, err := syscall.Mmap(int(f.Fd()), 0, int(size), syscall.PROT_READ, syscall.MAP_SHARED) - if err != nil { - return nil, fmt.Errorf("create mmap fail, %q, %w", filename, err) - } - - r := &ReaderAt{data} - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("mmap", r, p) - } - runtime.SetFinalizer(r, (*ReaderAt).Close) - return r, nil -} diff --git a/mmap/mmap_linux.go b/mmap/mmap_linux.go deleted file mode 100644 index fcaf52f..0000000 --- a/mmap/mmap_linux.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build linux -// +build linux - -// Package mmap provides a way to memory-map a file. -package mmap - -import ( - "errors" - "fmt" - "io" - "os" - "runtime" - "syscall" -) - -const ( - prefetchMaxSize = 16 * 1024 * 1024 -) - -// debug is whether to print debugging messages for manual testing. -// -// The runtime.SetFinalizer documentation says that, "The finalizer for x is -// scheduled to run at some arbitrary time after x becomes unreachable. There -// is no guarantee that finalizers will run before a program exits", so we -// cannot automatically test that the finalizer runs. Instead, set this to true -// when running the manual test. -const debug = false - -// ReaderAt reads a memory-mapped file. -// -// Like any io.ReaderAt, clients can execute parallel ReadAt calls, but it is -// not safe to call Close and reading methods concurrently. -type ReaderAt struct { - data []byte -} - -// Close closes the reader. 
-func (r *ReaderAt) Close() error { - if r.data == nil { - return nil - } - data := r.data - r.data = nil - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("munmap", r, p) - } - runtime.SetFinalizer(r, nil) - return syscall.Munmap(data) -} - -// Len returns the length of the underlying memory-mapped file. -func (r *ReaderAt) Len() int { - return len(r.data) -} - -// At returns the byte at index i. -func (r *ReaderAt) At(i int) byte { - return r.data[i] -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) ReadAt(p []byte, off int64) (int, error) { - if r.data == nil { - return 0, errors.New("mmap: closed") - } - if off < 0 || int64(len(r.data)) < off { - return 0, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - n := copy(p, r.data[off:]) - if n < len(p) { - return n, io.EOF - } - return n, nil -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) Slice(off, limit int64) ([]byte, error) { - if r.data == nil { - return nil, errors.New("mmap: closed") - } - - l := int64(len(r.data)) - if off < 0 || limit < 0 || l < off { - return nil, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - - if off+limit > l { - return r.data[off:], nil - } - - return r.data[off : off+limit], nil -} - -// Open memory-maps the named file for reading. -func Open(filename string) (*ReaderAt, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - fi, err := f.Stat() - if err != nil { - return nil, err - } - - size := fi.Size() - if size == 0 { - return &ReaderAt{}, nil - } - if size < 0 { - return nil, fmt.Errorf("mmap: file %q has negative size", filename) - } - if size != int64(int(size)) { - return nil, fmt.Errorf("mmap: file %q is too large", filename) - } - - data, err := syscall.Mmap(int(f.Fd()), 0, int(size), syscall.PROT_READ, syscall.MAP_SHARED) - if err != nil { - return nil, fmt.Errorf("create mmap fail, %q, %w", filename, err) - } - if size <= prefetchMaxSize { - if err := syscall.Madvise(data, syscall.MADV_SEQUENTIAL|syscall.MADV_WILLNEED); err != nil { - return nil, fmt.Errorf("madvise fail, %q, %w", filename, err) - } - } - - r := &ReaderAt{data} - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("mmap", r, p) - } - runtime.SetFinalizer(r, (*ReaderAt).Close) - return r, nil -} diff --git a/mmap/mmap_other.go b/mmap/mmap_other.go deleted file mode 100644 index 8c4ef04..0000000 --- a/mmap/mmap_other.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !linux && !windows && !darwin -// +build !linux,!windows,!darwin - -// Package mmap provides a way to memory-map a file. -package mmap - -import ( - "fmt" - "os" -) - -// ReaderAt reads a memory-mapped file. -// -// Like any io.ReaderAt, clients can execute parallel ReadAt calls, but it is -// not safe to call Close and reading methods concurrently. -type ReaderAt struct { - f *os.File - len int -} - -// Close closes the reader. -func (r *ReaderAt) Close() error { - return r.f.Close() -} - -// Len returns the length of the underlying memory-mapped file. -func (r *ReaderAt) Len() int { - return r.len -} - -// At returns the byte at index i. -func (r *ReaderAt) At(i int) byte { - if i < 0 || r.len <= i { - panic("index out of range") - } - var b [1]byte - r.ReadAt(b[:], int64(i)) - return b[0] -} - -// ReadAt implements the io.ReaderAt interface. 
-func (r *ReaderAt) ReadAt(p []byte, off int64) (int, error) { - return r.f.ReadAt(p, off) -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) Slice(off, limit int64) ([]byte, error) { - buf := make([]byte, limit) - n, err := r.ReadAt(buf, off) - if err != nil { - return nil, err - } - return buf[:n], nil -} - -// Open memory-maps the named file for reading. -func Open(filename string) (*ReaderAt, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - fi, err := f.Stat() - if err != nil { - f.Close() - return nil, err - } - - size := fi.Size() - if size < 0 { - f.Close() - return nil, fmt.Errorf("mmap: file %q has negative size", filename) - } - if size != int64(int(size)) { - f.Close() - return nil, fmt.Errorf("mmap: file %q is too large", filename) - } - - return &ReaderAt{ - f: f, - len: int(fi.Size()), - }, nil -} diff --git a/mmap/mmap_test.go b/mmap/mmap_test.go deleted file mode 100644 index 797fc5f..0000000 --- a/mmap/mmap_test.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mmap - -import ( - "bytes" - "io" - "io/ioutil" - "testing" -) - -func TestOpen(t *testing.T) { - const filename = "mmap_test.go" - r, err := Open(filename) - if err != nil { - t.Fatalf("Open: %v", err) - } - got := make([]byte, r.Len()) - if _, err := r.ReadAt(got, 0); err != nil && err != io.EOF { - t.Fatalf("ReadAt: %v", err) - } - want, err := ioutil.ReadFile(filename) - if err != nil { - t.Fatalf("ioutil.ReadFile: %v", err) - } - if len(got) != len(want) { - t.Fatalf("got %d bytes, want %d", len(got), len(want)) - } - if !bytes.Equal(got, want) { - t.Fatalf("\ngot %q\nwant %q", string(got), string(want)) - } -} diff --git a/mmap/mmap_windows.go b/mmap/mmap_windows.go deleted file mode 100644 index e0d3414..0000000 --- a/mmap/mmap_windows.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package mmap provides a way to memory-map a file. -package mmap - -import ( - "errors" - "fmt" - "io" - "os" - "runtime" - "syscall" - "unsafe" -) - -// debug is whether to print debugging messages for manual testing. -// -// The runtime.SetFinalizer documentation says that, "The finalizer for x is -// scheduled to run at some arbitrary time after x becomes unreachable. There -// is no guarantee that finalizers will run before a program exits", so we -// cannot automatically test that the finalizer runs. Instead, set this to true -// when running the manual test. -const debug = false - -// ReaderAt reads a memory-mapped file. -// -// Like any io.ReaderAt, clients can execute parallel ReadAt calls, but it is -// not safe to call Close and reading methods concurrently. -type ReaderAt struct { - data []byte -} - -// Close closes the reader. -func (r *ReaderAt) Close() error { - if r.data == nil { - return nil - } - data := r.data - r.data = nil - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("munmap", r, p) - } - runtime.SetFinalizer(r, nil) - return syscall.UnmapViewOfFile(uintptr(unsafe.Pointer(&data[0]))) -} - -// Len returns the length of the underlying memory-mapped file. -func (r *ReaderAt) Len() int { - return len(r.data) -} - -// At returns the byte at index i. 
-func (r *ReaderAt) At(i int) byte { - return r.data[i] -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) ReadAt(p []byte, off int64) (int, error) { - if r.data == nil { - return 0, errors.New("mmap: closed") - } - if off < 0 || int64(len(r.data)) < off { - return 0, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - n := copy(p, r.data[off:]) - if n < len(p) { - return n, io.EOF - } - return n, nil -} - -// ReadAt implements the io.ReaderAt interface. -func (r *ReaderAt) Slice(off, limit int64) ([]byte, error) { - if r.data == nil { - return nil, errors.New("mmap: closed") - } - - l := int64(len(r.data)) - if off < 0 || limit < 0 || l < off { - return nil, fmt.Errorf("mmap: invalid ReadAt offset %d", off) - } - - if off+limit > l { - return r.data[off:], nil - } - - return r.data[off : off+limit], nil -} - -// Open memory-maps the named file for reading. -func Open(filename string) (*ReaderAt, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - fi, err := f.Stat() - if err != nil { - return nil, err - } - - size := fi.Size() - if size == 0 { - return &ReaderAt{}, nil - } - if size < 0 { - return nil, fmt.Errorf("mmap: file %q has negative size", filename) - } - if size != int64(int(size)) { - return nil, fmt.Errorf("mmap: file %q is too large", filename) - } - - low, high := uint32(size), uint32(size>>32) - fmap, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, high, low, nil) - if err != nil { - return nil, err - } - defer syscall.CloseHandle(fmap) - ptr, err := syscall.MapViewOfFile(fmap, syscall.FILE_MAP_READ, 0, 0, uintptr(size)) - if err != nil { - return nil, err - } - data := unsafe.Slice((*byte)(unsafe.Pointer(ptr)), size) - - r := &ReaderAt{data: data} - if debug { - var p *byte - if len(data) != 0 { - p = &data[0] - } - println("mmap", r, p) - } - runtime.SetFinalizer(r, (*ReaderAt).Close) - return r, nil -} diff --git a/resource/db.go b/resource/db.go index e5a0622..972b97a 100644 --- a/resource/db.go +++ b/resource/db.go @@ -7,7 +7,6 @@ import ( ) func NewDBConn(dialect, dsn string) (*gorm.DB, error) { - var dialector gorm.Dialector switch dialect { case "mysql": diff --git a/scripts/encrypt b/scripts/encrypt new file mode 100755 index 0000000..6fe447d --- /dev/null +++ b/scripts/encrypt @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +set -ex; + +sleep 5 + +for i in {1..60}; do + stenc -f ${DEVICE} -e on -k ${KEY_FILE} -kd ${TAPE_BARCODE} -a 1 --ckod && break || sleep 5; +done + +sleep 3 diff --git a/encrypttape b/scripts/encrypttape similarity index 100% rename from encrypttape rename to scripts/encrypttape diff --git a/scripts/maketape b/scripts/maketape new file mode 100755 index 0000000..c851b9e --- /dev/null +++ b/scripts/maketape @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +set -e; + +POSITIONAL_ARGS=() +DEVICE="/dev/nst0" +SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'` + +while [[ $# -gt 0 ]]; do + case $1 in + -s) + BARCODE="$2" + shift # past argument + shift # past value + ;; + -n) + NAME="$2" + shift # past argument + shift # past value + ;; + -*|--*) + echo "Unknown option $1" + exit 1 + ;; + *) + POSITIONAL_ARGS+=("$1") # save positional arg + shift # past argument + ;; + esac +done + +echo "format tape as number '$BARCODE', name '$NAME'" +echo "copy '${POSITIONAL_ARGS[@]}' to tape" +set -- "${POSITIONAL_ARGS[@]}" + +set -ex; + +stenc -f ${DEVICE} -e on -k /root/tape.key -a 1 --ckod +sleep 3 +mkltfs -f -d ${SG_DEVICE} -s $BARCODE -n $NAME +sleep 3 +ltfs -o 
devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/ltfs -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject /ltfs +sleep 3 +acp --report /opt/ltfs/$BARCODE.json --to-linear $@ /ltfs/ +sleep 3 +umount /ltfs diff --git a/scripts/mkfs b/scripts/mkfs new file mode 100755 index 0000000..9718783 --- /dev/null +++ b/scripts/mkfs @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -ex; + +SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'` +mkltfs -f -d ${SG_DEVICE} -s ${TAPE_BARCODE} -n ${TAPE_NAME} +sleep 3 diff --git a/scripts/mount b/scripts/mount new file mode 100755 index 0000000..caa07c2 --- /dev/null +++ b/scripts/mount @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -ex; + +SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'` +ltfs -o devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/ltfs -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject ${MOUNT_POINT} +sleep 3 diff --git a/scripts/mounttape b/scripts/mounttape new file mode 100755 index 0000000..f8af91f --- /dev/null +++ b/scripts/mounttape @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +set -e; + +DEVICE="/dev/nst0" +SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'` + +set -ex; + +stenc -f ${DEVICE} -e on -k /root/tape.key -a 1 --ckod +ltfs -o devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/ltfs -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject /ltfs diff --git a/test.sh b/scripts/test.sh similarity index 61% rename from test.sh rename to scripts/test.sh index 8cc3f07..8d9a968 100644 --- a/test.sh +++ b/scripts/test.sh @@ -1,2 +1,2 @@ GOOS=linux go build -o ./output/ordercp ./cmd/ordercp -scp ./output/ordercp tape:ordercp +scp ./output/ordercp nas:ordercp diff --git a/scripts/umount b/scripts/umount new file mode 100755 index 0000000..278c42a --- /dev/null +++ b/scripts/umount @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -ex; + +sleep 15 +umount ${MOUNT_POINT} +sleep 60 diff --git a/sg_tools.go b/sg_tools.go deleted file mode 100644 index 68bd278..0000000 --- a/sg_tools.go +++ /dev/null @@ -1 +0,0 @@ -package tapewriter diff --git a/consts.go b/sgtape/consts.go similarity index 99% rename from consts.go rename to sgtape/consts.go index 2f8d17c..9e0ac4c 100644 --- a/consts.go +++ b/sgtape/consts.go @@ -1,4 +1,4 @@ -package tapewriter +package sgtape const ( SSCCodeAllowOverwrite = 0x82 diff --git a/sgtape/page.go b/sgtape/page.go new file mode 100644 index 0000000..32e4d21 --- /dev/null +++ b/sgtape/page.go @@ -0,0 +1,115 @@ +package sgtape + +type PageDec struct { + // std::uint16_t page_code; + PageCode uint16 + // std::uint16_t length; + Length uint16 + // std::byte flags; + Flags byte + // external data encryption control capable + // static constexpr auto flags_extdecc_pos {2u}; + FlagsExtdeccPos byte // default as 2 + // static constexpr std::byte flags_extdecc_mask {3u << flags_extdecc_pos}; + FlagsExtdeccMask byte // default as 3 << 2 + // configuration prevented + // static constexpr auto flags_cfg_p_pos {0u}; + FlagsCfgPPos byte // default as 0 + // static constexpr std::byte flags_cfg_p_mask {3u << flags_cfg_p_pos}; + FlagsCfgPMask byte // default as 3 << 0 + // std::byte reserved[15]; + Reserved [15]byte + // algorithm_descriptor ads[]; + AlgorithmDescriptors []AlgorithmDescriptor +} + +type AlgorithmDescriptor struct { + // std::uint8_t algorithm_index; + algorithm_index uint8 + // std::byte reserved1; + reserved1 byte + // std::uint16_t length; + length uint16 + // std::byte flags1; + flags1 byte + // // 
algorithm valid for mounted volume + // static constexpr auto flags1_avfmv_pos {7u}; + flags1_avfmv_pos byte // 7 + // static constexpr std::byte flags1_avfmv_mask {1u << flags1_avfmv_pos}; + flags1_avfmv_mask byte // 1 << 7 + // // supplemental decryption key capable + // static constexpr auto flags1_sdk_c_pos {6u}; + flags1_sdk_c_pos byte // 6 + // static constexpr std::byte flags1_sdk_c_mask {1u << flags1_sdk_c_pos}; + flags1_sdk_c_mask byte // 1 << 6 + // // message authentication code capable + // static constexpr auto flags1_mac_c_pos {5u}; + flags1_mac_c_pos byte // 5 + // static constexpr std::byte flags1_mac_c_mask {1u << flags1_mac_c_pos}; + flags1_mac_c_mask byte // 1 << 5 + // // distinguish encrypted logical block capable + // static constexpr auto flags1_delb_c_pos {4u}; + flags1_delb_c_pos byte // 4 + // static constexpr std::byte flags1_delb_c_mask {1u << flags1_delb_c_pos}; + flags1_delb_c_mask byte // 1 < 4 + // // decryption capabilities + // static constexpr auto flags1_decrypt_c_pos {2u}; + flags1_decrypt_c_pos byte // 2 + // static constexpr std::byte flags1_decrypt_c_mask {3u << flags1_decrypt_c_pos}; + flags1_decrypt_c_mask byte // 3 << 4 + // // encryption capabilities + // static constexpr auto flags1_encrypt_c_pos {0u}; + flags1_encrypt_c_pos byte // 0 + // static constexpr std::byte flags1_encrypt_c_mask {3u << flags1_encrypt_c_pos}; + flags1_encrypt_c_mask byte // 3 << 0 + // std::byte flags2; + flags2 byte + // // algorithm valid for current logical position + // static constexpr auto flags2_avfcp_pos {6u}; + flags2_avfcp_pos byte // 6 + // static constexpr std::byte flags2_avfcp_mask {3u << flags2_avfcp_pos}; + flags2_avfcp_mask byte // 3 << 6 + // // nonce capabilities + // static constexpr auto flags2_nonce_pos {4u}; + flags2_nonce_pos byte // 4 + // static constexpr std::byte flags2_nonce_mask {3u << flags2_nonce_pos}; + flags2_nonce_mask byte // 3 << 4 + // // KAD format capable + // static constexpr auto flags2_kadf_c_pos {3u}; + // static constexpr std::byte flags2_kadf_c_mask {1u << flags2_kadf_c_pos}; + // // volume contains encrypted logical blocks capable + // static constexpr auto flags2_vcelb_c_pos {2u}; + // static constexpr std::byte flags2_vcelb_c_mask {1u << flags2_vcelb_c_pos}; + // // U-KAD fixed + // static constexpr auto flags2_ukadf_pos {1u}; + // static constexpr std::byte flags2_ukadf_mask {1u << flags2_ukadf_pos}; + // // A-KAD fixed + // static constexpr auto flags2_akadf_pos {0u}; + // static constexpr std::byte flags2_akadf_mask {1u << flags2_akadf_pos}; + // std::uint16_t maximum_ukad_length; + // std::uint16_t maximum_akad_length; + // std::uint16_t key_length; + // std::byte flags3; + // // decryption capabilities + // static constexpr auto flags3_dkad_c_pos {6u}; + // static constexpr std::byte flags3_dkad_c_mask {3u << flags3_dkad_c_pos}; + // // external encryption mode control capabilities + // static constexpr auto flags3_eemc_c_pos {4u}; + // static constexpr std::byte flags3_eemc_c_mask {3u << flags3_eemc_c_pos}; + // // raw decryption mode control capabilities + // static constexpr auto flags3_rdmc_c_pos {1u}; + // static constexpr std::byte flags3_rdmc_c_mask {7u << flags3_rdmc_c_pos}; + // // encryption algorithm records encryption mode + // static constexpr auto flags3_earem_pos {0u}; + // static constexpr std::byte flags3_earem_mask {1u << flags3_earem_pos}; + // std::uint8_t maximum_eedk_count; + // static constexpr auto maximum_eedk_count_pos {0u}; + // static constexpr std::uint8_t maximum_eedk_count_mask { + // 
15u << maximum_eedk_count_pos}; + // std::uint16_t msdk_count; + // std::uint16_t maximum_eedk_size; + // std::byte reserved2[2]; + // std::uint32_t security_algorithm_code; + + // static constexpr std::size_t header_size {4u}; +} diff --git a/sgtape/sg_tools.go b/sgtape/sg_tools.go new file mode 100644 index 0000000..4c37993 --- /dev/null +++ b/sgtape/sg_tools.go @@ -0,0 +1 @@ +package sgtape diff --git a/tools.go b/sgtape/tools.go similarity index 97% rename from tools.go rename to sgtape/tools.go index 5345a5c..3651d11 100644 --- a/tools.go +++ b/sgtape/tools.go @@ -1,4 +1,4 @@ -package tapewriter +package sgtape import "fmt" diff --git a/writer.go b/sgtape/writer.go.bak similarity index 75% rename from writer.go rename to sgtape/writer.go.bak index 0dfb075..ab6bfe3 100644 --- a/writer.go +++ b/sgtape/writer.go.bak @@ -1,59 +1,26 @@ -package tapewriter +package sgtape import ( - "archive/tar" "encoding/binary" "fmt" "os" - "github.com/benmcclelland/mtio" "github.com/benmcclelland/sgio" "github.com/davecgh/go-spew/spew" "github.com/sirupsen/logrus" ) const ( - MaxSense = 255 - BlockSize = 512 * 1024 + MaxSense = 255 ) -type position struct { - partition uint8 - offset uint64 +type TapeDrive struct { + tape *os.File } -type capacity struct { - cap uint64 - all uint64 -} - -type Writer struct { - *tar.Writer - buffer *BlockWriter - tape *os.File - index []*tar.Header - blockSize int - writen uint64 - current *position -} - -func NewWriter(tape *os.File) (*Writer, error) { - w := &Writer{ - tape: tape, - index: make([]*tar.Header, 0, 16), - blockSize: BlockSize, - writen: 0, - current: new(position), - } - - w.buffer = NewBlockWriter(w.tape, w.blockSize, 32) - w.Writer = tar.NewWriter(w.buffer) - - if err := w.formatTape(); err != nil { - return nil, err - } - if err := mtio.DoOp(w.tape, mtio.NewMtOp(mtio.WithOperation(mtio.MTSETBLK), mtio.WithCount(BlockSize))); err != nil { - return nil, err +func New(tape *os.File) (*TapeDrive, error) { + w := &TapeDrive{ + tape: tape, } cap, err := w.readCapacity() @@ -65,19 +32,57 @@ func NewWriter(tape *os.File) (*Writer, error) { return w, nil } -func (w *Writer) Close() error { - if err := w.Writer.Close(); err != nil { - return err +func (w *TapeDrive) send(cmd []byte, buf []byte) error { + return w.exec(sgio.SG_DXFER_TO_DEV, cmd, buf) +} + +func (w *TapeDrive) read(cmd []byte, size int64) ([]byte, error) { + buf := make([]byte, size) + if err := w.exec(sgio.SG_DXFER_TO_DEV, cmd, buf); err != nil { + return nil, err + } + return buf, nil +} + +func (w *TapeDrive) command(cmd []byte) error { + return w.exec(sgio.SG_DXFER_TO_DEV, cmd, nil) +} + +func (w *TapeDrive) exec(direction int32, cmd []byte, dxfer []byte) error { + senseBuf := make([]byte, MaxSense) + + ioHdr := &sgio.SgIoHdr{ + InterfaceID: int32('S'), + DxferDirection: direction, + + CmdLen: uint8(len(cmd)), + Cmdp: &cmd[0], + + MxSbLen: uint8(len(senseBuf)), + Sbp: &senseBuf[0], + + // DxferLen: 0, + // Dxferp: , + + Timeout: sgio.TIMEOUT_20_SECS, } - if err := w.buffer.Close(); err != nil { + if len(dxfer) > 0 { + ioHdr.DxferLen = uint32(len(dxfer)) + ioHdr.Dxferp = &dxfer[0] + } + + if err := sgio.SgioSyscall(w.tape, ioHdr); err != nil { + return err + } + if err := sgio.CheckSense(ioHdr, &senseBuf); err != nil { return err } return nil } -func (w *Writer) formatTape() error { +func (w *TapeDrive) formatTape() error { // mode sense -> mode select -> format partitionMode := make([]byte, 32) if err := w.modeSense(TC_MP_MEDIUM_PARTITION, TC_MP_PC_CURRENT, 0x00, partitionMode, 
TC_MP_MEDIUM_PARTITION_SIZE); err != nil { @@ -117,8 +122,13 @@ func (w *Writer) formatTape() error { return nil } +type capacity struct { + cap uint64 + all uint64 +} + // only for lto5 -func (w *Writer) readCapacity() ([]*capacity, error) { +func (w *TapeDrive) readCapacity() ([]*capacity, error) { buf := make([]byte, 1024) if err := w.logSense(LOG_TAPECAPACITY, 0, buf); err != nil { return nil, fmt.Errorf("read capacity fail, err= %w", err) @@ -148,7 +158,7 @@ func (w *Writer) readCapacity() ([]*capacity, error) { return result, nil } -func (w *Writer) modeSense(page, pc, subpage uint8, buf []byte, size uint16) error { +func (w *TapeDrive) modeSense(page, pc, subpage uint8, buf []byte, size uint16) error { cdb := make([]uint8, 10) cdb[0] = SPCCodeModeSense10 cdb[2] = pc | (page & 0x3F) // Current value @@ -159,7 +169,7 @@ func (w *Writer) modeSense(page, pc, subpage uint8, buf []byte, size uint16) err return w.sendCmd(sgio.SG_DXFER_FROM_DEV, buf, cdb...) } -func (w *Writer) modeSelect(buf []byte, size uint16) error { +func (w *TapeDrive) modeSelect(buf []byte, size uint16) error { cdb := make([]uint8, 10) cdb[0] = SPCCodeModeSelect10 cdb[1] = 0x10 @@ -169,7 +179,7 @@ func (w *Writer) modeSelect(buf []byte, size uint16) error { return w.sendCmd(sgio.SG_DXFER_TO_DEV, buf, cdb...) } -func (w *Writer) logSense(page, subpage uint8, buf []byte) error { +func (w *TapeDrive) logSense(page, subpage uint8, buf []byte) error { cdb := make([]uint8, 10) cdb[0] = SPCCodeLogSense cdb[2] = 0x40 | (page & 0x3F) // Current value @@ -186,11 +196,11 @@ func (w *Writer) logSense(page, subpage uint8, buf []byte) error { return nil } -func (w *Writer) formatPartition() error { +func (w *TapeDrive) formatPartition() error { return w.sendCmd(sgio.SG_DXFER_TO_FROM_DEV, nil, SSCCodeFormatMedium, 0, FormatDestPart, 0, 0, 0) } -func (w *Writer) locate(target *position) error { +func (w *TapeDrive) locate(target *position) error { cdb := make([]uint8, 16) cdb[0] = SSCCodeLocate16 if w.current.partition != target.partition { @@ -207,37 +217,3 @@ func (w *Writer) locate(target *position) error { // left := int(target.offset) % int(w.blockSize) return nil } - -func (w *Writer) sendCmd(direction int32, dxfer []byte, cmd ...uint8) error { - senseBuf := make([]byte, MaxSense) - - ioHdr := &sgio.SgIoHdr{ - InterfaceID: int32('S'), - DxferDirection: direction, - - CmdLen: uint8(len(cmd)), - Cmdp: &cmd[0], - - MxSbLen: uint8(len(senseBuf)), - Sbp: &senseBuf[0], - - // DxferLen: 0, - // Dxferp: , - - Timeout: sgio.TIMEOUT_20_SECS, - } - - if len(dxfer) > 0 { - ioHdr.DxferLen = uint32(len(dxfer)) - ioHdr.Dxferp = &dxfer[0] - } - - if err := sgio.SgioSyscall(w.tape, ioHdr); err != nil { - return err - } - if err := sgio.CheckSense(ioHdr, &senseBuf); err != nil { - return err - } - - return nil -} diff --git a/tapechanger/changer.go b/tapechanger/changer.go new file mode 100644 index 0000000..028906a --- /dev/null +++ b/tapechanger/changer.go @@ -0,0 +1,24 @@ +package tapechanger + +import ( + "context" + + "github.com/abc950309/tapewriter/library" +) + +var ( + tapeChangers map[string]func(dsn string) (TapeChanger, error) +) + +type Tape struct { + *library.Tape + MountPoint string +} + +type TapeChanger interface { + Change(ctx context.Context, target *library.Tape) (*Tape, error) +} + +func RegisterTapeChanger(schema string, factory func(dsn string) (TapeChanger, error)) { + tapeChangers[schema] = factory +} diff --git a/tools/command.go b/tools/command.go new file mode 100644 index 0000000..6968e9f --- /dev/null +++ 
b/tools/command.go @@ -0,0 +1,25 @@ +package tools + +import ( + "context" + "io" + "os/exec" +) + +func RunCommand(ctx context.Context, name string, args []string, stdin io.Reader, stdout, stderr io.Writer) (<-chan error, error) { + cmd := exec.CommandContext(ctx, name, args...) + cmd.Stdin = stdin + cmd.Stdout = stdout + cmd.Stderr = stderr + + if err := cmd.Start(); err != nil { + return nil, err + } + + ch := make(chan error, 1) + go func() { + ch <- cmd.Wait() + }() + + return ch, nil +} diff --git a/tools/filesystem.go b/tools/filesystem.go new file mode 100644 index 0000000..01ebce3 --- /dev/null +++ b/tools/filesystem.go @@ -0,0 +1,28 @@ +package tools + +import ( + "fmt" + "syscall" +) + +type FileSystem struct { + TypeName string + MountPoint string + TotalSize int64 + AvailableSize int64 +} + +func GetFileSystem(path string) (*FileSystem, error) { + stat := new(syscall.Statfs_t) + + if err := syscall.Statfs(path, stat); err != nil { + return nil, fmt.Errorf("read statfs fail, err= %w", err) + } + + return &FileSystem{ + // TypeName: UnpaddingInt8s(stat.Fstypename[:]), + // MountPoint: UnpaddingInt8s(stat.Mntonname[:]), + TotalSize: int64(stat.Blocks) * int64(stat.Bsize), + AvailableSize: int64(stat.Bavail) * int64(stat.Bsize), + }, nil +} diff --git a/tools/filesystem_test.go b/tools/filesystem_test.go new file mode 100644 index 0000000..c677a76 --- /dev/null +++ b/tools/filesystem_test.go @@ -0,0 +1,16 @@ +package tools + +import ( + "testing" + + "github.com/davecgh/go-spew/spew" +) + +func TestGetFileSystem(t *testing.T) { + fs, err := GetFileSystem("/") + if err != nil { + panic(err) + } + + t.Log(spew.Sdump(fs)) +} diff --git a/tools/pprof.go b/tools/pprof.go new file mode 100644 index 0000000..62de5cc --- /dev/null +++ b/tools/pprof.go @@ -0,0 +1,32 @@ +package tools + +import ( + "net/http" + "net/http/pprof" + "strings" + + "github.com/sirupsen/logrus" +) + +// NewDebugServer . +func NewDebugServer(addr string) { + debugMux := http.NewServeMux() + + debugMux.HandleFunc("/debug/pprof/", pprof.Index) + debugMux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + debugMux.HandleFunc("/debug/pprof/profile", pprof.Profile) + debugMux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + debugMux.HandleFunc("/debug/pprof/trace", pprof.Trace) + + if err := http.ListenAndServe(addr, debugMux); err != nil { + if err == nil { + return + } + + if strings.Contains(err.Error(), "interrupt") { + return + } + + logrus.WithError(err).Errorf("debug server listen and serve fail: addr= %s", addr) + } +} diff --git a/tools/recover.go b/tools/recover.go new file mode 100644 index 0000000..dbc82e8 --- /dev/null +++ b/tools/recover.go @@ -0,0 +1,30 @@ +package tools + +import ( + "context" + "fmt" + "runtime/debug" + + "github.com/sirupsen/logrus" +) + +func Wrap(ctx context.Context, f func()) { + defer func() { + e := recover() + if e == nil { + return + } + + var err error + switch v := e.(type) { + case error: + err = v + default: + err = fmt.Errorf("%v", err) + } + + logrus.WithContext(ctx).WithError(err).Errorf("panic: %s", debug.Stack()) + }() + + f() +} diff --git a/tools/strings.go b/tools/strings.go new file mode 100644 index 0000000..fed3972 --- /dev/null +++ b/tools/strings.go @@ -0,0 +1,14 @@ +package tools + +func UnpaddingInt8s(buf []int8) string { + result := make([]byte, 0, len(buf)) + for _, c := range buf { + if c == 0x00 { + break + } + + result = append(result, byte(c)) + } + + return string(result) +}
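Taken together, the library package added in this patch keeps the catalogue as a tree of File rows keyed by (parent_id, name) and maps them to on-tape Position rows. A minimal end-to-end sketch of wiring it up is below; the sqlite dialect string, DSN and directory name are assumptions for illustration, not part of this patch:

package main

import (
	"context"
	"log"

	"github.com/abc950309/tapewriter/library"
	"github.com/abc950309/tapewriter/resource"
)

func main() {
	// Dialect and DSN are assumptions; resource.NewDBConn switches on the
	// dialect name to build the gorm dialector.
	db, err := resource.NewDBConn("sqlite", "library.db")
	if err != nil {
		log.Fatal(err)
	}

	lib := library.New(db)
	if err := lib.AutoMigrate(); err != nil { // migrates the file, position and tape models
		log.Fatal(err)
	}

	ctx := context.Background()

	// MkdirAll walks the path from the root (ID 0), creating each missing
	// segment; directories that already exist are reused.
	dir, err := lib.MkdirAll(ctx, library.Root.ID, "Photos/2022", 0o755)
	if err != nil {
		log.Fatal(err)
	}

	// GetByPath resolves the same path segment by segment and returns
	// (nil, nil) when a segment does not exist.
	found, err := lib.GetByPath(ctx, library.Root.ID, "Photos/2022")
	if err != nil {
		log.Fatal(err)
	}
	if found != nil && found.ID == dir.ID {
		log.Printf("directory %q ready, id= %d", found.Name, found.ID)
	}
}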