feat: useable

This commit is contained in:
Samuel N Cui
2022-12-12 22:48:23 +08:00
parent af8c37b18e
commit f87ec06af6
134 changed files with 18715 additions and 1343 deletions

5
.gitignore vendored
View File

@@ -14,3 +14,8 @@
# Dependency directories (remove the comment below to include it)
# vendor/
output/
frontend/node_modules/
client/node_modules/
tapes.db
upload_test.sh

7
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,7 @@
{
"protoc": {
"options": [
"--proto_path=./entity"
]
}
}

21
apis/api.go Normal file
View File

@@ -0,0 +1,21 @@
package apis
import (
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/executor"
"github.com/abc950309/tapewriter/library"
)
// API implements the gRPC service declared in entity, backed by the file
// library and the job executor.
//
// JobGet(context.Context, *entity.JobGetRequest) (*entity.JobGetReply, error)
type API struct {
	entity.UnimplementedServiceServer

	lib        *library.Library   // file/tape metadata store
	exe        *executor.Executor // job scheduling and tape-device handling
	sourceBase string             // root directory for source-file browsing (see SourceList)
}

// New builds an API that browses source files under base, using lib for
// library operations and exe for job execution.
func New(base string, lib *library.Library, exe *executor.Executor) *API {
	return &API{lib: lib, exe: exe, sourceBase: base}
}

98
apis/converts.go Normal file
View File

@@ -0,0 +1,98 @@
package apis
import (
"io/fs"
"path"
"path/filepath"
"time"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/executor"
"github.com/abc950309/tapewriter/library"
)
// convertFiles maps library file records to their entity representations.
func convertFiles(files ...*library.File) []*entity.File {
	converted := make([]*entity.File, 0, len(files))
	for _, file := range files {
		item := &entity.File{
			Id:       file.ID,
			ParentId: file.ParentID,
			Name:     file.Name,
			Mode:     int64(file.Mode),
			ModTime:  file.ModTime.Unix(),
			Size:     file.Size,
			Hash:     file.Hash,
		}
		converted = append(converted, item)
	}
	return converted
}
// convertPositions maps tape position records to entity representations.
func convertPositions(positions ...*library.Position) []*entity.Position {
	converted := make([]*entity.Position, 0, len(positions))
	for _, pos := range positions {
		item := &entity.Position{
			Id:        pos.ID,
			FileId:    pos.FileID,
			TapeId:    pos.TapeID,
			Path:      pos.Path,
			Mode:      int64(pos.Mode),
			ModTime:   pos.ModTime.Unix(),
			WriteTime: pos.WriteTime.Unix(),
			Size:      pos.Size,
			Hash:      pos.Hash,
		}
		converted = append(converted, item)
	}
	return converted
}
// convertSourceFiles converts fs.FileInfo entries under directory parent
// into entity.SourceFile records, silently skipping anything that is
// neither a regular file nor a directory (devices, sockets, symlinks, ...).
func convertSourceFiles(parent string, files ...fs.FileInfo) []*entity.SourceFile {
	results := make([]*entity.SourceFile, 0, len(files))
	for _, f := range files {
		if !f.Mode().IsDir() && !f.Mode().IsRegular() {
			continue
		}
		// f.Name() can carry separators when callers stat a joined path
		// (SourceList does); keep only the base name.
		_, file := path.Split(f.Name())
		results = append(results, &entity.SourceFile{
			// NOTE(review): filepath.Join here vs path.Join everywhere else
			// (e.g. SourceList) — on Windows this would emit backslashes into
			// otherwise slash-separated API paths; confirm and unify.
			Path:       filepath.Join(parent, file),
			ParentPath: parent,
			Name:       file,
			Mode:       int64(f.Mode()),
			ModTime:    f.ModTime().Unix(),
			Size:       f.Size(),
		})
	}
	return results
}
// convertJobs maps executor jobs to their entity representations.
func convertJobs(jobs ...*executor.Job) []*entity.Job {
	out := make([]*entity.Job, 0, len(jobs))
	for _, j := range jobs {
		out = append(out, &entity.Job{
			Id:         j.ID,
			Status:     j.Status,
			Priority:   j.Priority,
			CreateTime: j.CreateTime.Unix(),
			UpdateTime: j.UpdateTime.Unix(),
			State:      j.State,
		})
	}
	return out
}
func convertOptionalTime(t *time.Time) *int64 {
if t == nil {
return nil
}
u := t.Unix()
return &u
}
func map2list[K, T comparable](mapping map[K]T) []T {
result := make([]T, 0, len(mapping))
for _, v := range mapping {
result = append(result, v)
}
return result
}

11
apis/device_list.go Normal file
View File

@@ -0,0 +1,11 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// DeviceList reports the tape devices the executor currently considers
// available.
func (api *API) DeviceList(ctx context.Context, req *entity.DeviceListRequest) (*entity.DeviceListReply, error) {
	devices := api.exe.ListAvailableDevices()
	return &entity.DeviceListReply{Devices: devices}, nil
}

16
apis/file_delete.go Normal file
View File

@@ -0,0 +1,16 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
mapset "github.com/deckarep/golang-set/v2"
)
// FileDelete removes the files identified by req.Ids from the library.
// The id list is routed through a set to drop duplicates before deletion.
func (api *API) FileDelete(ctx context.Context, req *entity.FileDeleteRequest) (*entity.FileDeleteReply, error) {
	uniq := mapset.NewThreadUnsafeSet(req.Ids...)
	err := api.lib.Delete(ctx, uniq.ToSlice())
	if err != nil {
		return nil, err
	}
	return new(entity.FileDeleteReply), nil
}

54
apis/file_edit.go Normal file
View File

@@ -0,0 +1,54 @@
package apis
import (
"context"
"fmt"
"io/fs"
"path"
"strings"
"github.com/abc950309/tapewriter/entity"
)
// FileEdit renames and/or re-parents a file. req.File.ParentId, when set,
// moves the file under that parent. req.File.Name, when set, renames the
// file; a name containing slashes is treated as a relative path: the
// directory part is created (MkdirAll) under the file's current parent and
// the file is moved there, taking the final path element as its new name.
func (api *API) FileEdit(ctx context.Context, req *entity.FileEditRequest) (*entity.FileEditReply, error) {
	file, err := api.lib.GetFile(ctx, req.Id)
	if err != nil {
		return nil, err
	}
	if file == nil {
		return nil, fmt.Errorf("file not found, id= %d", req.Id)
	}
	if req.File.ParentId != nil {
		file.ParentID = *req.File.ParentId
	}
	if req.File.Name != nil {
		name := strings.TrimSpace(*req.File.Name)
		if name == "" {
			return nil, fmt.Errorf("unexpected target name, not a string")
		}
		if !strings.ContainsAny(name, `\/`) {
			file.Name = name
		} else {
			// Normalize Windows-style separators and resolve "."/".." parts.
			name = path.Clean(strings.ReplaceAll(name, `\`, `/`))
			dirname, filename := path.Split(name)
			if filename == "" {
				return nil, fmt.Errorf("unexpected target name, end with slash, '%s'", name)
			}
			dir, err := api.lib.MkdirAll(ctx, file.ParentID, dirname, fs.ModePerm)
			if err != nil {
				return nil, err
			}
			file.ParentID = dir.ID
			// BUG FIX: the file must also take the final path element as its
			// new name. Previously only ParentID was updated, so renaming to
			// "a/b/c" moved the file into a/b but kept its old name.
			file.Name = filename
		}
	}
	if err := api.lib.MoveFile(ctx, file); err != nil {
		return nil, err
	}
	return &entity.FileEditReply{File: convertFiles(file)[0]}, nil
}

37
apis/file_get.go Normal file
View File

@@ -0,0 +1,37 @@
package apis
import (
"context"
"errors"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/library"
)
// FileGet returns a file's record together with its tape positions and
// direct children. A missing file is not an error: the reply's File field
// is simply left nil while positions/children are still queried.
func (api *API) FileGet(ctx context.Context, req *entity.FileGetRequest) (*entity.FileGetReply, error) {
	record, err := api.lib.GetFile(ctx, req.Id)
	if err != nil && !errors.Is(err, library.ErrFileNotFound) {
		return nil, err
	}
	var converted *entity.File
	if record != nil {
		converted = convertFiles(record)[0]
	}
	positions, err := api.lib.GetPositionByFileID(ctx, req.Id)
	if err != nil {
		return nil, err
	}
	children, err := api.lib.List(ctx, req.Id)
	if err != nil {
		return nil, err
	}
	reply := &entity.FileGetReply{
		File:      converted,
		Positions: convertPositions(positions...),
		Children:  convertFiles(children...),
	}
	return reply, nil
}

16
apis/file_list_parents.go Normal file
View File

@@ -0,0 +1,16 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// FileListParents returns the chain of ancestor directories for a file.
func (api *API) FileListParents(ctx context.Context, req *entity.FileListParentsRequest) (*entity.FileListParentsReply, error) {
	parents, err := api.lib.ListParents(ctx, req.Id)
	if err != nil {
		return nil, err
	}
	reply := &entity.FileListParentsReply{Parents: convertFiles(parents...)}
	return reply, nil
}

28
apis/file_mkdir.go Normal file
View File

@@ -0,0 +1,28 @@
package apis
import (
"context"
"fmt"
"io/fs"
"github.com/abc950309/tapewriter/entity"
)
// FileMkdir creates the directory chain req.Path under parent req.ParentId
// (0 means the library root) and returns the deepest directory created.
func (api *API) FileMkdir(ctx context.Context, req *entity.FileMkdirRequest) (*entity.FileMkdirReply, error) {
	if req.ParentId != 0 {
		parent, err := api.lib.GetFile(ctx, req.ParentId)
		if err != nil {
			return nil, err
		}
		// BUG FIX: the original `if err != nil || parent == nil { return nil, err }`
		// returned (nil, nil) for a missing parent — a silent "success" with an
		// empty reply — and made this explicit not-found error unreachable.
		if parent == nil {
			return nil, fmt.Errorf("file not found, id= %d", req.ParentId)
		}
	}
	dir, err := api.lib.MkdirAll(ctx, req.ParentId, req.Path, fs.ModePerm)
	if err != nil {
		return nil, err
	}
	return &entity.FileMkdirReply{File: convertFiles(dir)[0]}, nil
}

24
apis/job_create.go Normal file
View File

@@ -0,0 +1,24 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/executor"
)
// JobCreate persists a new pending job with the requested priority and
// immediately starts it on the executor.
func (api *API) JobCreate(ctx context.Context, req *entity.JobCreateRequest) (*entity.JobCreateReply, error) {
	draft := &executor.Job{
		Status:   entity.JobStatus_Pending,
		Priority: req.Job.Priority,
	}
	job, err := api.exe.CreateJob(ctx, draft, req.Job.Param)
	if err != nil {
		return nil, err
	}
	if err = api.exe.Start(ctx, job); err != nil {
		return nil, err
	}
	return &entity.JobCreateReply{Job: convertJobs(job)[0]}, nil
}

21
apis/job_display.go Normal file
View File

@@ -0,0 +1,21 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// JobDisplay returns the display payload for a job. Lookup and render
// errors are deliberately swallowed: the handler degrades to an empty
// reply so a broken job never breaks the UI's polling loop.
func (api *API) JobDisplay(ctx context.Context, req *entity.JobDisplayRequest) (*entity.JobDisplayReply, error) {
	job, err := api.exe.GetJob(ctx, req.Id)
	if err != nil {
		return new(entity.JobDisplayReply), nil
	}
	display, err := api.exe.Display(ctx, job)
	if err != nil {
		return new(entity.JobDisplayReply), nil
	}
	return &entity.JobDisplayReply{Display: display}, nil
}

32
apis/job_get_log.go Normal file
View File

@@ -0,0 +1,32 @@
package apis
import (
"context"
"fmt"
"io"
"github.com/abc950309/tapewriter/entity"
)
// JobGetLog reads the log of job req.JobId starting at byte req.Offset and
// returns the remainder. A job without a log yields an empty (non-nil)
// byte slice rather than an error.
func (api *API) JobGetLog(ctx context.Context, req *entity.JobGetLogRequest) (*entity.JobGetLogReply, error) {
	reader, err := api.exe.NewLogReader(req.JobId)
	if err != nil {
		return nil, fmt.Errorf("open log fail, %w", err)
	}
	if reader == nil {
		return &entity.JobGetLogReply{Logs: []byte{}}, nil
	}
	// NOTE(review): if the reader wraps an open file it is never closed
	// here; confirm NewLogReader's contract and close it if it is a Closer.
	if req.Offset > 0 {
		// io.SeekStart replaces the magic whence constant 0.
		if _, err := reader.Seek(req.Offset, io.SeekStart); err != nil {
			return nil, fmt.Errorf("seek log file fail, offset= %d, %w", req.Offset, err)
		}
	}
	buf, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("read log fail, %w", err)
	}
	return &entity.JobGetLogReply{Logs: buf}, nil
}

27
apis/job_list.go Normal file
View File

@@ -0,0 +1,27 @@
package apis
import (
"context"
"fmt"
"github.com/abc950309/tapewriter/entity"
)
// JobList fetches jobs either by explicit ids (Mget) or by a filtered
// listing (List), depending on which oneof variant req.Param carries.
func (api *API) JobList(ctx context.Context, req *entity.JobListRequest) (*entity.JobListReply, error) {
	switch param := req.Param.(type) {
	case *entity.JobListRequest_Mget:
		found, err := api.exe.MGetJob(ctx, param.Mget.Ids...)
		if err != nil {
			return nil, err
		}
		return &entity.JobListReply{Jobs: convertJobs(map2list(found)...)}, nil
	case *entity.JobListRequest_List:
		found, err := api.exe.ListJob(ctx, param.List)
		if err != nil {
			return nil, err
		}
		return &entity.JobListReply{Jobs: convertJobs(found...)}, nil
	}
	return nil, fmt.Errorf("unexpected param, %T", req.Param)
}

20
apis/job_next.go Normal file
View File

@@ -0,0 +1,20 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// JobNext advances a job by submitting the next-step parameters to the
// executor and returns the refreshed job.
func (api *API) JobNext(ctx context.Context, req *entity.JobNextRequest) (*entity.JobNextReply, error) {
	job, err := api.exe.GetJob(ctx, req.Id)
	if err != nil {
		return nil, err
	}
	if err = api.exe.Submit(ctx, job, req.Param); err != nil {
		return nil, err
	}
	return &entity.JobNextReply{Job: convertJobs(job)[0]}, nil
}

88
apis/source_list.go Normal file
View File

@@ -0,0 +1,88 @@
package apis
import (
"context"
"fmt"
"io/fs"
"os"
"path"
"strings"
"github.com/abc950309/tapewriter/entity"
)
// SourceList resolves req.Path (slash-separated, relative to the configured
// source base directory) and returns the target file, the breadcrumb chain
// of path components from the root down to it, and — when the target is a
// directory — its children.
func (api *API) SourceList(ctx context.Context, req *entity.SourceListRequest) (*entity.SourceListReply, error) {
	// "./" is the frontend's alias for the source root.
	if req.Path == "./" {
		req.Path = ""
	}
	// Split the request path and drop empty segments (leading/trailing or
	// doubled slashes); the leading "" element stands for the root itself.
	parts := strings.Split(req.Path, "/")
	filteredParts := make([]string, 1, len(parts)+1)
	filteredParts[0] = ""
	for _, part := range parts {
		if part == "" {
			continue
		}
		filteredParts = append(filteredParts, part)
	}
	// buf, _ := json.Marshal(filteredParts)
	// logrus.WithContext(ctx).Infof("parts= %s", buf)
	// Walk down one component at a time, stat-ing each prefix so the reply
	// carries a breadcrumb entry per level.
	current := ""
	chain := make([]*entity.SourceFile, 0, len(filteredParts))
	for _, part := range filteredParts {
		p := path.Join(api.sourceBase, current, part)
		stat, err := os.Stat(p)
		if err != nil {
			return nil, err
		}
		files := convertSourceFiles(current, stat)
		if len(files) == 0 {
			// Neither a regular file nor a directory (device, socket, ...).
			return nil, fmt.Errorf("unexpected file, %s", current+part)
		}
		file := files[0]
		chain = append(chain, file)
		if !fs.FileMode(file.Mode).IsDir() {
			// Stop at the first non-directory component; trailing request
			// segments (if any) are ignored.
			break
		}
		current = path.Join(current, part)
	}
	if len(chain) == 0 {
		return nil, fmt.Errorf("unexpected file, '%s'", req.Path)
	}
	// Present the base directory as a friendly root entry.
	chain[0].Path = "./"
	chain[0].Name = "Root"
	file := chain[len(chain)-1]
	reply := &entity.SourceListReply{
		File:  file,
		Chain: chain,
	}
	if !fs.FileMode(file.Mode).IsDir() {
		return reply, nil
	}
	// Target is a directory: also list its children.
	dir := path.Join(api.sourceBase, req.Path)
	children, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	infos := make([]fs.FileInfo, 0, len(children))
	for _, child := range children {
		info, err := child.Info()
		if err != nil {
			return nil, err
		}
		infos = append(infos, info)
	}
	reply.Children = convertSourceFiles(req.Path, infos...)
	return reply, nil
}

30
apis/tape_get.go Normal file
View File

@@ -0,0 +1,30 @@
package apis
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// TapeMGet fetches tapes by id and converts them to their API shape.
func (api *API) TapeMGet(ctx context.Context, req *entity.TapeMGetRequest) (*entity.TapeMGetReply, error) {
	tapes, err := api.lib.MGetTape(ctx, req.Ids...)
	if err != nil {
		return nil, err
	}
	reply := &entity.TapeMGetReply{Tapes: make([]*entity.Tape, 0, len(tapes))}
	for _, t := range tapes {
		reply.Tapes = append(reply.Tapes, &entity.Tape{
			Id:            t.ID,
			Barcode:       t.Barcode,
			Name:          t.Name,
			Encryption:    t.Encryption,
			CreateTime:    t.CreateTime.Unix(),
			DestroyTime:   convertOptionalTime(t.DestroyTime),
			CapacityBytes: t.CapacityBytes,
			// "Writen" spelling follows the generated entity field name.
			WritenBytes: t.WritenBytes,
		})
	}
	return reply, nil
}

View File

@@ -1,78 +0,0 @@
package tapewriter
import (
"io"
"os"
"sync"
"syscall"
)
var (
_ = io.WriteCloser(new(BlockWriter))
)
type BlockWriter struct {
target uintptr
blockSize int
buffer chan []byte
pool sync.Pool
closed sync.WaitGroup
current []byte
off int
}
func NewBlockWriter(tape *os.File, blockSize, bufferBlocks int) *BlockWriter {
w := &BlockWriter{
target: tape.Fd(),
blockSize: blockSize,
buffer: make(chan []byte, bufferBlocks),
current: make([]byte, blockSize),
pool: sync.Pool{New: func() interface{} { return make([]byte, blockSize) }},
}
w.closed.Add(1)
go w.loop()
return w
}
func (w *BlockWriter) Write(buf []byte) (int, error) {
var n, cn int
for len(buf) > 0 {
cn = copy(w.current, buf)
buf = buf[cn:]
w.off += cn
n += cn
if w.off >= w.blockSize {
w.buffer <- w.current
w.current = w.pool.Get().([]byte)
}
}
return n, nil
}
func (w *BlockWriter) Close() error {
w.buffer <- w.current[:w.off]
close(w.buffer)
w.closed.Wait()
return nil
}
func (w *BlockWriter) loop() {
defer w.closed.Done()
for {
buf, ok := <-w.buffer
if !ok {
break
}
_, err := syscall.Write(int(w.target), buf)
if err != nil {
panic(err)
}
}
}

16
build.sh Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bash
# Build all tapewriter binaries and assemble a self-contained ./output tree
# (binaries + helper scripts + built frontend + service/config templates).
set -e;

# Always run from the repository root, wherever the script was invoked from.
CURDIR=$(cd $(dirname $0); pwd);
cd ${CURDIR};

# Start from a clean output directory.
rm -rf output;
mkdir -p output;

go build -o ./output/httpd ./cmd/tape-httpd;
go build -o ./output/loadtape ./cmd/tape-loadtape;
go build -o ./output/import ./cmd/tape-import;

# Runtime assets: helper scripts, frontend bundle, systemd unit, config example.
cp -r scripts ./output/;
cp -r ./frontend/dist ./output/frontend;
cp ./cmd/tape-httpd/tape-writer.service ./output/
cp ./cmd/tape-httpd/config.example.yaml ./output/

7
build_linux.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
# Run build.sh inside a golang:1.19 container so the binaries are built for
# Linux regardless of the host platform.
set -e;

CURDIR=$(cd $(dirname $0); pwd);
cd ${CURDIR};

docker run --rm -v $(pwd):/app golang:1.19 sh -c "cd /app && bash build.sh"

View File

@@ -1,11 +0,0 @@
package main
import (
"os"
"github.com/davecgh/go-spew/spew"
)
// main pretty-prints the process arguments with spew — a throwaway
// debugging helper.
func main() {
	spew.Dump(os.Args)
}

View File

@@ -1,437 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"hash"
"io"
"os"
"os/signal"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/abc950309/tapewriter/library"
"github.com/abc950309/tapewriter/mmap"
"github.com/minio/sha256-simd"
"github.com/schollz/progressbar/v3"
"github.com/sirupsen/logrus"
)
const (
	// unexpectFileMode masks file types we refuse to copy: any special type
	// bit (symlink, device, socket, pipe, ...) other than the directory bit.
	unexpectFileMode = os.ModeType &^ os.ModeDir
	// batchSize is the mmap slice size used for streaming copies (1 MiB).
	batchSize = 1024 * 1024
)

var (
	// shaPool recycles SHA-256 hashers across files.
	shaPool = &sync.Pool{New: func() interface{} { return sha256.New() }}
)
// main copies os.Args[1] into os.Args[2] in deterministic order and, when
// ORDERCP_REPORT_PATH is set, writes a JSON report of results and errors
// (filename from ORDERCP_REPORT_FILENAME, or a timestamp by default).
func main() {
	src, dst := os.Args[1], os.Args[2]
	c, err := NewCopyer(dst, src)
	if err != nil {
		panic(err)
	}
	c.Run()

	p := os.Getenv("ORDERCP_REPORT_PATH")
	if p == "" {
		return
	}
	errs := make([]string, 0, len(c.errs))
	for _, e := range c.errs {
		errs = append(errs, e.Error())
	}
	report, _ := json.Marshal(map[string]interface{}{"errors": errs, "files": c.results})

	n := os.Getenv("ORDERCP_REPORT_FILENAME")
	if n == "" {
		n = time.Now().Format("2006-01-02T15:04:05.999999.csv")
	}
	r, err := os.Create(fmt.Sprintf("%s/%s", p, n))
	if err != nil {
		// BUG FIX: logrus.Warnf formats via fmt.Sprintf, which does not
		// support %w — it rendered "%!w(...)". Use %v for plain formatting.
		logrus.Warnf("open report fail, path= '%s', err= %v", fmt.Sprintf("%s/%s", p, n), err)
		logrus.Infof("report: %s", report)
		return
	}
	defer r.Close()
	// Surface a short write of the report instead of ignoring it.
	if _, err := r.Write(report); err != nil {
		logrus.Warnf("write report fail, err= %v", err)
	}
}
// Copyer copies an ordered set of files from the src roots into dst,
// hashing each file and collecting per-file results for the final report.
type Copyer struct {
	bar *progressbar.ProgressBar // byte-level progress display
	src []string                 // normalized source roots (dirs end in '/')
	dst string                   // normalized destination root, ends in '/'

	copyed int64  // bytes copied so far; updated atomically, read by the ticker in Run
	num    int64  // count of regular files discovered by walk
	files  []*Job // ordered job list built by walk
	errs   []error

	copyPipe   chan *CopyJob // prepare stage -> copy stage
	changePipe chan *Job     // copy stage -> metadata-fixup stage

	results []*library.TapeFile // successful copies, for the report
}
// NewCopyer validates dst and the src list, walks the sources to build the
// ordered job list, and sizes a byte-progress bar to the total. Directory
// paths are normalized to end with '/'.
func NewCopyer(dst string, src ...string) (*Copyer, error) {
	dst = strings.TrimSpace(dst)
	if dst == "" {
		return nil, fmt.Errorf("dst not found")
	}
	if dst[len(dst)-1] != '/' {
		dst = dst + "/"
	}

	filtered := make([]string, 0, len(src))
	for _, s := range src {
		s = strings.TrimSpace(s)
		if s == "" {
			continue
		}
		srcStat, err := os.Stat(s)
		if err != nil {
			// BUG FIX: report the individual path `s`; the original message
			// interpolated the entire `src` slice.
			return nil, fmt.Errorf("check src path '%s', %w", s, err)
		}
		if srcStat.IsDir() && s[len(s)-1] != '/' {
			s = s + "/"
		}
		filtered = append(filtered, s)
	}
	if len(filtered) == 0 {
		return nil, fmt.Errorf("src not found")
	}
	src = filtered

	dstStat, err := os.Stat(dst)
	if err != nil {
		return nil, fmt.Errorf("check dst path '%s', %w", dst, err)
	}
	if !dstStat.IsDir() {
		return nil, fmt.Errorf("dst path is not a dir")
	}

	c := &Copyer{
		dst: dst, src: src,
		copyPipe:   make(chan *CopyJob, 32),
		changePipe: make(chan *Job, 8),
	}
	for _, s := range c.src {
		c.walk(s, "", true)
	}

	var total int64
	for _, file := range c.files {
		total += file.Size
	}
	c.bar = progressbar.DefaultBytes(total)
	return c, nil
}
// Run drives the copy pipeline: a signal handler translating Ctrl-C into
// cancellation, a progress ticker, a prepare goroutine feeding copyPipe, a
// copy goroutine feeding changePipe, and the calling goroutine applying
// metadata fixes. It returns when changePipe drains.
//
// NOTE(review): on cancellation both the ticker goroutine and the prepare
// goroutine execute close(c.copyPipe); the second close panics, and
// prepare may also send on the already-closed channel. Confirm and route
// the close through a single owner (e.g. sync.Once) before reuse.
func (c *Copyer) Run() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	// Translate Ctrl-C into context cancellation.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	go func() {
		for sig := range signals {
			if sig != os.Interrupt {
				continue
			}
			cancel()
		}
	}()
	// Progress ticker: publish bytes copied since the last tick to the bar.
	go func() {
		ticker := time.NewTicker(time.Millisecond * 500)
		defer ticker.Stop()
		last := int64(0)
		for range ticker.C {
			current := atomic.LoadInt64(&c.copyed)
			c.bar.Add(int(current - last))
			last = current
			select {
			case <-ctx.Done():
				close(c.copyPipe)
				return
			default:
			}
		}
	}()
	// Prepare stage: mmap source files / create directories, in walk order.
	go func() {
		for _, file := range c.files {
			c.prepare(ctx, file)
			select {
			case <-ctx.Done():
				close(c.copyPipe)
				return
			default:
			}
		}
		close(c.copyPipe)
	}()
	// Copy stage: stream contents, record results, forward for fixup.
	go func() {
		for copyer := range c.copyPipe {
			hash, err := c.copy(ctx, copyer)
			if err != nil {
				c.ReportError(c.dst+copyer.Path, err)
				// Remove the partially written destination file on failure.
				if err := os.Remove(c.dst + copyer.Path); err != nil {
					c.ReportError(c.dst+copyer.Path, fmt.Errorf("delete file with error fail, %w", err))
				}
			} else {
				// Directories are recorded on exit markers, not here.
				if !copyer.Mode.IsDir() {
					c.results = append(c.results, &library.TapeFile{
						Path:      copyer.Path,
						Size:      copyer.Size,
						Mode:      copyer.Mode,
						ModTime:   copyer.ModTime,
						WriteTime: time.Now(),
						Hash:      hash,
					})
				}
			}
			select {
			case <-ctx.Done():
				close(c.changePipe)
				return
			default:
			}
		}
		close(c.changePipe)
	}()
	// Final stage (this goroutine): apply mode/mtime to finished entries.
	for file := range c.changePipe {
		c.changeInfo(file)
	}
}
// ReportError logs the failure for file and records it for the final report.
func (c *Copyer) ReportError(file string, err error) {
	wrapped := fmt.Errorf("'%s': %w", file, err)
	logrus.Errorf("'%s', %s", file, err)
	c.errs = append(c.errs, wrapped)
}
// walk recursively scans src (+path) and appends copy jobs to c.files in
// depth-first order. Directories below the first level are bracketed with
// JobTypeEnterDir/JobTypeExitDir markers so the copy phase creates the
// directory before its contents and fixes its metadata after them. first
// marks the top-level call for a source root, whose own directory is not
// recreated at the destination.
func (c *Copyer) walk(src, path string, first bool) {
	name := src + path
	stat, err := os.Stat(name)
	if err != nil {
		c.ReportError(name, fmt.Errorf("walk get stat, %w", err))
		return
	}
	job := NewJobFromFileInfo(src, path, stat)
	// Skip special files (symlinks, devices, sockets, pipes, ...).
	if job.Mode&unexpectFileMode != 0 {
		return
	}
	if !job.Mode.IsDir() {
		// Regular file: number it for progress display and queue it.
		c.num++
		job.Number = c.num
		c.files = append(c.files, job)
		return
	}
	if first {
		// Source root: copy its contents, not the directory itself.
		files, err := os.ReadDir(name)
		if err != nil {
			c.ReportError(name, fmt.Errorf("walk read dir, %w", err))
			return
		}
		for _, file := range files {
			c.walk(src, file.Name(), false)
		}
		return
	}
	// Enter marker: triggers mkdir before the directory's contents.
	enterJob := new(Job)
	*enterJob = *job
	enterJob.Type = JobTypeEnterDir
	c.files = append(c.files, enterJob)
	files, err := os.ReadDir(name)
	if err != nil {
		c.ReportError(name, fmt.Errorf("walk read dir, %w", err))
		return
	}
	for _, file := range files {
		c.walk(src, path+"/"+file.Name(), false)
	}
	// Exit marker: applies mode/mtime after all children are written.
	exitJob := new(Job)
	*exitJob = *job
	exitJob.Type = JobTypeExitDir
	c.files = append(c.files, exitJob)
}
// prepare stages one job for copying. Directory-enter jobs create the
// destination directory immediately; directory-exit jobs flow through the
// copy pipe as marker CopyJobs (nil src) so metadata is fixed only after
// the directory's contents finish; regular files are mmapped and queued.
// NOTE(review): ctx is accepted but unused here — cancellation is checked
// by the caller (Run) between jobs.
func (c *Copyer) prepare(ctx context.Context, job *Job) {
	switch job.Type {
	case JobTypeEnterDir:
		name := c.dst + job.Path
		err := os.Mkdir(name, job.Mode&os.ModePerm)
		if err != nil {
			c.ReportError(name, fmt.Errorf("mkdir fail, %w", err))
			return
		}
		return
	case JobTypeExitDir:
		c.copyPipe <- &CopyJob{Job: job}
		return
	}
	name := job.Source + job.Path
	file, err := mmap.Open(name)
	if err != nil {
		c.ReportError(name, fmt.Errorf("open src file fail, %w", err))
		return
	}
	c.copyPipe <- &CopyJob{Job: job, src: file}
}
// copy writes one staged job into the destination tree and forwards the
// job to the metadata-fixup pipe. Directory markers (nil src) are only
// forwarded. Returns the SHA-256 of the copied payload for regular files.
func (c *Copyer) copy(ctx context.Context, job *CopyJob) ([]byte, error) {
	if job.src == nil {
		c.changePipe <- job.Job
		return nil, nil
	}
	defer job.src.Close()

	name := c.dst + job.Path
	dst, err := os.Create(name)
	if err != nil {
		return nil, fmt.Errorf("open dst file fail, %w", err)
	}
	defer dst.Close()

	c.bar.Describe(fmt.Sprintf("[%d/%d]: %s", job.Number, c.num, job.Path))
	sum, err := c.streamCopy(ctx, dst, job.src)
	if err != nil {
		return nil, fmt.Errorf("copy file fail, %w", err)
	}
	c.changePipe <- job.Job
	return sum, nil
}
// changeInfo applies the source file's permission bits and timestamps to
// the freshly written destination copy.
func (c *Copyer) changeInfo(info *Job) {
	target := c.dst + info.Path
	if err := os.Chmod(target, info.Mode&os.ModePerm); err != nil {
		c.ReportError(target, fmt.Errorf("change info, chmod fail, %w", err))
	}
	if err := os.Chtimes(target, info.ModTime, info.ModTime); err != nil {
		c.ReportError(target, fmt.Errorf("change info, chtimes fail, %w", err))
	}
}
// streamCopy copies src (an mmapped file) into dst in batchSize slices,
// hashing the bytes on a side goroutine, and returns the SHA-256 sum. The
// named results matter: the deferred block fills in h only when the copy
// succeeded.
func (c *Copyer) streamCopy(ctx context.Context, dst io.Writer, src *mmap.ReaderAt) (h []byte, err error) {
	if src.Len() == 0 {
		// Empty file: nothing written, nil hash by convention.
		return nil, nil
	}
	sha := shaPool.Get().(hash.Hash)
	sha.Reset()
	defer shaPool.Put(sha)
	var wg sync.WaitGroup
	hashChan := make(chan []byte, 4)
	defer func() {
		// Stop the hasher; on success wait for it to drain before reading
		// the final sum.
		close(hashChan)
		if err != nil {
			return
		}
		wg.Wait()
		h = sha.Sum(nil)
	}()
	wg.Add(1)
	go func() {
		defer wg.Done()
		for buf := range hashChan {
			sha.Write(buf)
		}
	}()
	err = func() error {
		for idx := int64(0); ; idx += batchSize {
			buf, err := src.Slice(idx, batchSize)
			if err != nil {
				return fmt.Errorf("slice mmap fail, %w", err)
			}
			nr := len(buf)
			// Hand the slice to the hasher before writing; mmap slices are
			// read-only views, so sharing is safe.
			hashChan <- buf
			nw, ew := dst.Write(buf)
			if nw < 0 || nr < nw {
				nw = 0
				if ew == nil {
					return fmt.Errorf("write fail, unexpected return, byte_num= %d", nw)
				}
				return fmt.Errorf("write fail, %w", ew)
			}
			if nr != nw {
				return fmt.Errorf("write fail, write and read bytes not equal, read= %d write= %d", nr, nw)
			}
			// Published for the progress-bar ticker in Run.
			atomic.AddInt64(&c.copyed, int64(nr))
			if len(buf) < batchSize {
				// A short slice marks end of file.
				return nil
			}
			select {
			case <-ctx.Done():
				return ctx.Err()
			default:
			}
		}
	}()
	return
}
// JobType distinguishes regular file copies from directory bracket markers.
type JobType uint8

const (
	// JobTypeNormal is a plain file copy.
	JobTypeNormal = JobType(iota)
	// JobTypeEnterDir creates the directory before its contents are copied.
	JobTypeEnterDir
	// JobTypeExitDir applies directory metadata after its contents finish.
	JobTypeExitDir
)
// Job describes one filesystem entry to process: a regular-file copy or a
// directory enter/exit marker.
type Job struct {
	Source string  // source root this entry came from (dirs end in '/')
	Path   string  // path relative to Source
	Type   JobType // normal copy or directory marker
	Number int64   // 1-based file counter, for progress display

	Name    string      // base name of the file
	Size    int64       // length in bytes for regular files; system-dependent for others
	Mode    os.FileMode // file mode bits
	ModTime time.Time   // modification time
}
// NewJobFromFileInfo builds a copy job for the entry at src+path described
// by info.
func NewJobFromFileInfo(src, path string, info os.FileInfo) *Job {
	return &Job{
		Source:  src,
		Path:    path,
		Name:    info.Name(),
		Size:    info.Size(),
		Mode:    info.Mode(),
		ModTime: info.ModTime(),
	}
}
// CopyJob pairs a Job with its mmapped source. src is nil for directory
// markers, which flow through the copy pipe without any data transfer.
type CopyJob struct {
	*Job
	src *mmap.ReaderAt
}

View File

@@ -0,0 +1,19 @@
domain: http://127.0.0.1:8080
listen: 127.0.0.1:8080
debug_listen: 127.0.0.1:8081
work_directory: ./
database:
dialect: sqlite
dsn: ./tapes.db
tape_devices:
- /dev/nst0
filesystem_root: ./
scripts:
encrypt: ./scripts/encrypt
mkfs: ./scripts/mkfs
mount: ./scripts/mount
umount: ./scripts/umount

117
cmd/tape-httpd/main.go Normal file
View File

@@ -0,0 +1,117 @@
package main
import (
"bytes"
"context"
"flag"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"github.com/abc950309/tapewriter/apis"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/executor"
"github.com/abc950309/tapewriter/library"
"github.com/abc950309/tapewriter/resource"
"github.com/abc950309/tapewriter/tools"
"github.com/improbable-eng/grpc-web/go/grpcweb"
"google.golang.org/grpc"
"gopkg.in/yaml.v2"
)
// config mirrors the YAML configuration file consumed by the httpd binary
// (see config.example.yaml).
type config struct {
	Domain        string `yaml:"domain"`         // public base URL, templated into the frontend as the API base
	Listen        string `yaml:"listen"`         // main HTTP listen address
	DebugListen   string `yaml:"debug_listen"`   // optional debug server listen address
	WorkDirectory string `yaml:"work_directory"` // executor working directory

	Database struct {
		Dialect string `yaml:"dialect"` // e.g. "sqlite"
		DSN     string `yaml:"dsn"`
	} `yaml:"database"`

	TapeDevices    []string `yaml:"tape_devices"`    // e.g. /dev/nst0
	FilesystemRoot string   `yaml:"filesystem_root"` // root for source-file browsing

	// Scripts holds paths to helper shell scripts invoked by the executor.
	Scripts struct {
		Encrypt string `yaml:"encrypt"`
		Mkfs    string `yaml:"mkfs"`
		Mount   string `yaml:"mount"`
		Umount  string `yaml:"umount"`
	} `yaml:"scripts"`
}
var (
configPath = flag.String("config", "./config.yaml", "config file path")
)
// main loads configuration, wires the database, library, executor and
// gRPC-web API together, and serves the API plus the static frontend over
// plain HTTP.
func main() {
	flag.Parse()

	cf, err := os.Open(*configPath)
	if err != nil {
		panic(err)
	}
	conf := new(config)
	if err := yaml.NewDecoder(cf).Decode(conf); err != nil {
		panic(err)
	}
	// BUG FIX: close the config file after decoding instead of leaking the
	// handle for the life of the process.
	cf.Close()

	if conf.DebugListen != "" {
		go tools.Wrap(context.Background(), func() { tools.NewDebugServer(conf.DebugListen) })
	}

	db, err := resource.NewDBConn(conf.Database.Dialect, conf.Database.DSN)
	if err != nil {
		panic(err)
	}
	lib := library.New(db)
	if err := lib.AutoMigrate(); err != nil {
		panic(err)
	}
	exe := executor.New(
		db, lib, conf.TapeDevices, conf.WorkDirectory,
		conf.Scripts.Encrypt, conf.Scripts.Mkfs, conf.Scripts.Mount, conf.Scripts.Umount,
	)
	if err := exe.AutoMigrate(); err != nil {
		panic(err)
	}

	// gRPC service, exposed to browsers through the grpc-web wrapper.
	s := grpc.NewServer()
	api := apis.New(conf.FilesystemRoot, lib, exe)
	entity.RegisterServiceServer(s, api)

	mux := http.NewServeMux()
	grpcWebServer := grpcweb.WrapServer(s, grpcweb.WithOriginFunc(func(origin string) bool { return true }))
	mux.Handle("/services/", http.StripPrefix("/services/", grpcWebServer))

	fs := http.FileServer(http.Dir("./frontend/assets"))
	mux.Handle("/assets/", http.StripPrefix("/assets/", fs))

	// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16; switch to
	// os.ReadFile when the import block is next touched.
	indexBuf, err := ioutil.ReadFile("./frontend/index.html")
	if err != nil {
		panic(err)
	}
	// Inject the public API base into the single-page app's HTML shell.
	indexBuf = bytes.ReplaceAll(indexBuf, []byte("%%API_BASE%%"), []byte(fmt.Sprintf("%s/services", conf.Domain)))
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		w.Write(indexBuf)
	})

	srv := &http.Server{
		Handler: mux,
		Addr:    conf.Listen,
		// NOTE(review): no Read/Write timeouts are configured; acceptable
		// for a LAN tool, confirm before exposing publicly.
	}
	log.Printf("http server listening at %v", srv.Addr)
	if err := srv.ListenAndServe(); err != nil {
		log.Fatalf("failed to serve: %v", err)
	}
}

View File

@@ -0,0 +1,16 @@
[Unit]
Description=Tape Writer Service
Documentation=https://github.com/abc950309/tapewriter/
After=network.target
[Service]
User=root
Type=simple
WorkingDirectory=/opt/tapewriter
ExecStart=/opt/tapewriter/httpd
Restart=always
RestartSec=15
StartLimitInterval=0
[Install]
WantedBy=multi-user.target

38
cmd/tape-import/main.go Normal file
View File

@@ -0,0 +1,38 @@
package main
import (
"context"
"os"
"github.com/abc950309/tapewriter/external"
"github.com/abc950309/tapewriter/library"
"github.com/abc950309/tapewriter/resource"
)
// main imports an ordercp copy report into the tape library:
//
//	tape-import <report-file> <barcode> <tape-name>
func main() {
	ctx := context.Background()

	// BUG FIX: validate the argument count up front; blind indexing of
	// os.Args panicked with a bare "index out of range" when arguments
	// were missing.
	if len(os.Args) < 4 {
		panic("usage: tape-import <report-file> <barcode> <name>")
	}
	file, barcode, name := os.Args[1], os.Args[2], os.Args[3]

	db, err := resource.NewDBConn("sqlite", "./tapes.db")
	if err != nil {
		panic(err)
	}
	lib := library.New(db)
	if err := lib.AutoMigrate(); err != nil {
		panic(err)
	}

	f, err := os.Open(file)
	if err != nil {
		panic(err)
	}
	// BUG FIX: the report file was never closed.
	defer f.Close()

	ext := external.New(lib)
	if err := ext.ImportACPReport(ctx, barcode, name, "file:tape.key", f); err != nil {
		panic(err)
	}
}

89
cmd/tape-loadtape/main.go Normal file
View File

@@ -0,0 +1,89 @@
package main
import (
"context"
"flag"
"fmt"
"os"
"github.com/abc950309/tapewriter/executor"
"github.com/abc950309/tapewriter/library"
"github.com/abc950309/tapewriter/resource"
"gopkg.in/yaml.v2"
)
type config struct {
WorkDirectory string `yaml:"work_directory"`
Database struct {
Dialect string `yaml:"dialect"`
DSN string `yaml:"dsn"`
} `yaml:"database"`
TapeDevices []string `yaml:"tape_devices"`
FilesystemRoot string `yaml:"filesystem_root"`
Scripts struct {
Encrypt string `yaml:"encrypt"`
Mkfs string `yaml:"mkfs"`
Mount string `yaml:"mount"`
Umount string `yaml:"umount"`
} `yaml:"scripts"`
}
var (
configPath = flag.String("config", "./config.yaml", "config file path")
barcode = flag.String("barcode", "", "barcode for tape")
device = flag.String("device", "/dev/nst0", "barcode for tape")
)
// main loads the tape identified by -barcode in the drive at -device and
// restores its library state through the executor.
func main() {
	flag.Parse()
	if *barcode == "" {
		panic("expect barcode")
	}

	cf, err := os.Open(*configPath)
	if err != nil {
		panic(err)
	}
	conf := new(config)
	if err := yaml.NewDecoder(cf).Decode(conf); err != nil {
		panic(err)
	}
	// BUG FIX: close the config file after decoding instead of leaking it.
	cf.Close()

	db, err := resource.NewDBConn(conf.Database.Dialect, conf.Database.DSN)
	if err != nil {
		panic(err)
	}
	lib := library.New(db)
	if err := lib.AutoMigrate(); err != nil {
		panic(err)
	}
	exe := executor.New(
		db, lib, conf.TapeDevices, conf.WorkDirectory,
		conf.Scripts.Encrypt, conf.Scripts.Mkfs, conf.Scripts.Mount, conf.Scripts.Umount,
	)
	if err := exe.AutoMigrate(); err != nil {
		panic(err)
	}

	ctx := context.Background()
	tapes, err := lib.MGetTapeByBarcode(ctx, *barcode)
	if err != nil {
		panic(err)
	}
	tape := tapes[*barcode]
	if tape == nil {
		panic(fmt.Errorf("tape not found, barcode= %s", *barcode))
	}
	if err := exe.RestoreLoadTape(ctx, *device, tape); err != nil {
		panic(err)
	}
}

View File

@@ -1,47 +0,0 @@
package main
import (
"archive/tar"
"fmt"
"io"
"os"
"github.com/abc950309/tapewriter"
)
// main writes a single file (os.Args[1]) to the tape at /dev/st0 as one
// tar entry through the tapewriter block writer.
func main() {
	f, err := os.OpenFile("/dev/st0", os.O_WRONLY, 0666)
	if err != nil {
		panic(err)
	}
	w, err := tapewriter.NewWriter(f)
	if err != nil {
		panic(err)
	}
	path := os.Args[1]
	info, err := os.Stat(path)
	if err != nil {
		panic(err)
	}
	target, err := os.Open(path)
	if err != nil {
		panic(err)
	}
	// NOTE(review): WriteHeader's error is ignored, and neither w, f nor
	// target is ever closed — if NewWriter wraps a tar/block writer, the
	// archive trailer and buffered tail are likely never flushed to tape.
	// Confirm NewWriter's contract and add the missing Close calls.
	w.WriteHeader(&tar.Header{
		Name: info.Name(),
		Size: info.Size(),
	})
	// syscall.Write()
	written, err := io.Copy(w, target)
	if err != nil {
		panic(err)
	}
	fmt.Println(written)
}

144
entity/copy_status.pb.go Normal file
View File

@@ -0,0 +1,144 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: copy_status.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type CopyStatus int32
const (
CopyStatus_Draft CopyStatus = 0
CopyStatus_Pending CopyStatus = 1 // waiting in queue
CopyStatus_Running CopyStatus = 2
CopyStatus_Staged CopyStatus = 3
CopyStatus_Submited CopyStatus = 4
CopyStatus_Failed CopyStatus = 255
)
// Enum value maps for CopyStatus.
var (
CopyStatus_name = map[int32]string{
0: "Draft",
1: "Pending",
2: "Running",
3: "Staged",
4: "Submited",
255: "Failed",
}
CopyStatus_value = map[string]int32{
"Draft": 0,
"Pending": 1,
"Running": 2,
"Staged": 3,
"Submited": 4,
"Failed": 255,
}
)
func (x CopyStatus) Enum() *CopyStatus {
p := new(CopyStatus)
*p = x
return p
}
func (x CopyStatus) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (CopyStatus) Descriptor() protoreflect.EnumDescriptor {
return file_copy_status_proto_enumTypes[0].Descriptor()
}
func (CopyStatus) Type() protoreflect.EnumType {
return &file_copy_status_proto_enumTypes[0]
}
func (x CopyStatus) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use CopyStatus.Descriptor instead.
func (CopyStatus) EnumDescriptor() ([]byte, []int) {
return file_copy_status_proto_rawDescGZIP(), []int{0}
}
var File_copy_status_proto protoreflect.FileDescriptor
var file_copy_status_proto_rawDesc = []byte{
0x0a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73,
0x2a, 0x58, 0x0a, 0x0a, 0x43, 0x6f, 0x70, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x09,
0x0a, 0x05, 0x44, 0x72, 0x61, 0x66, 0x74, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65, 0x6e,
0x64, 0x69, 0x6e, 0x67, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e,
0x67, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x67, 0x65, 0x64, 0x10, 0x03, 0x12,
0x0c, 0x0a, 0x08, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x65, 0x64, 0x10, 0x04, 0x12, 0x0b, 0x0a,
0x06, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33,
0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e,
0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_copy_status_proto_rawDescOnce sync.Once
file_copy_status_proto_rawDescData = file_copy_status_proto_rawDesc
)
func file_copy_status_proto_rawDescGZIP() []byte {
file_copy_status_proto_rawDescOnce.Do(func() {
file_copy_status_proto_rawDescData = protoimpl.X.CompressGZIP(file_copy_status_proto_rawDescData)
})
return file_copy_status_proto_rawDescData
}
// Type and dependency tables consumed by protoimpl.TypeBuilder in
// file_copy_status_proto_init. Generated code — do not hand-edit.
var file_copy_status_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_copy_status_proto_goTypes = []interface{}{
	(CopyStatus)(0), // 0: copy_status.CopyStatus
}
var file_copy_status_proto_depIdxs = []int32{
	0, // [0:0] is the sub-list for method output_type
	0, // [0:0] is the sub-list for method input_type
	0, // [0:0] is the sub-list for extension type_name
	0, // [0:0] is the sub-list for extension extendee
	0, // [0:0] is the sub-list for field type_name
}
func init() { file_copy_status_proto_init() }

// file_copy_status_proto_init registers copy_status.proto with the protobuf
// runtime, building File_copy_status_proto from the raw descriptor and the
// Go type tables above. Safe to call more than once: the guard below makes
// every call after the first a no-op. Generated code — do not hand-edit.
func file_copy_status_proto_init() {
	if File_copy_status_proto != nil {
		return
	}
	// x exists only so reflect can recover this package's import path.
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_copy_status_proto_rawDesc,
			NumEnums:      1,
			NumMessages:   0,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_copy_status_proto_goTypes,
		DependencyIndexes: file_copy_status_proto_depIdxs,
		EnumInfos:         file_copy_status_proto_enumTypes,
	}.Build()
	File_copy_status_proto = out.File
	// Release the construction-time tables; the built descriptor now owns
	// all the type information.
	file_copy_status_proto_rawDesc = nil
	file_copy_status_proto_goTypes = nil
	file_copy_status_proto_depIdxs = nil
}

14
entity/copy_status.proto Normal file
View File

@@ -0,0 +1,14 @@
syntax = "proto3";

package copy_status;

option go_package = "github.com/abc950309/tapewriter/entity";

// CopyStatus enumerates the lifecycle states of a copy task.
enum CopyStatus {
  Draft = 0;
  Pending = 1; // waiting in queue
  Running = 2;
  Staged = 3;
  // NOTE(review): "Submited" is a typo for "Submitted". Renaming it would
  // change the generated Go identifier and the enum's JSON name, so it must
  // be fixed together with regeneration and a sweep of all callers — left
  // as-is here to stay in sync with the generated copy_status.pb.go.
  Submited = 4;
  Failed = 255;
}

274
entity/file.pb.go Normal file
View File

@@ -0,0 +1,274 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: file.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type File struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
ParentId int64 `protobuf:"varint,2,opt,name=parent_id,json=parentId,proto3" json:"parent_id,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"`
ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"`
Size int64 `protobuf:"varint,19,opt,name=size,proto3" json:"size,omitempty"`
Hash []byte `protobuf:"bytes,20,opt,name=hash,proto3" json:"hash,omitempty"`
}
func (x *File) Reset() {
*x = File{}
if protoimpl.UnsafeEnabled {
mi := &file_file_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *File) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*File) ProtoMessage() {}
func (x *File) ProtoReflect() protoreflect.Message {
mi := &file_file_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use File.ProtoReflect.Descriptor instead.
func (*File) Descriptor() ([]byte, []int) {
return file_file_proto_rawDescGZIP(), []int{0}
}
func (x *File) GetId() int64 {
if x != nil {
return x.Id
}
return 0
}
func (x *File) GetParentId() int64 {
if x != nil {
return x.ParentId
}
return 0
}
func (x *File) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *File) GetMode() int64 {
if x != nil {
return x.Mode
}
return 0
}
func (x *File) GetModTime() int64 {
if x != nil {
return x.ModTime
}
return 0
}
func (x *File) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *File) GetHash() []byte {
if x != nil {
return x.Hash
}
return nil
}
type EditedFile struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ParentId *int64 `protobuf:"varint,2,opt,name=parent_id,json=parentId,proto3,oneof" json:"parent_id,omitempty"`
Name *string `protobuf:"bytes,3,opt,name=name,proto3,oneof" json:"name,omitempty"`
}
func (x *EditedFile) Reset() {
*x = EditedFile{}
if protoimpl.UnsafeEnabled {
mi := &file_file_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EditedFile) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EditedFile) ProtoMessage() {}
func (x *EditedFile) ProtoReflect() protoreflect.Message {
mi := &file_file_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EditedFile.ProtoReflect.Descriptor instead.
func (*EditedFile) Descriptor() ([]byte, []int) {
return file_file_proto_rawDescGZIP(), []int{1}
}
func (x *EditedFile) GetParentId() int64 {
if x != nil && x.ParentId != nil {
return *x.ParentId
}
return 0
}
func (x *EditedFile) GetName() string {
if x != nil && x.Name != nil {
return *x.Name
}
return ""
}
var File_file_proto protoreflect.FileDescriptor
var file_file_proto_rawDesc = []byte{
0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x66, 0x69,
0x6c, 0x65, 0x22, 0x9e, 0x01, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x70,
0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08,
0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65,
0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04,
0x6d, 0x6f, 0x64, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65,
0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20, 0x01,
0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73,
0x69, 0x7a, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12,
0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68,
0x61, 0x73, 0x68, 0x22, 0x5e, 0x0a, 0x0a, 0x45, 0x64, 0x69, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c,
0x65, 0x12, 0x20, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02,
0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64,
0x88, 0x01, 0x01, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
0x09, 0x48, 0x01, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a,
0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6e,
0x61, 0x6d, 0x65, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65,
0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_file_proto_rawDescOnce sync.Once
file_file_proto_rawDescData = file_file_proto_rawDesc
)
func file_file_proto_rawDescGZIP() []byte {
file_file_proto_rawDescOnce.Do(func() {
file_file_proto_rawDescData = protoimpl.X.CompressGZIP(file_file_proto_rawDescData)
})
return file_file_proto_rawDescData
}
var file_file_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_file_proto_goTypes = []interface{}{
(*File)(nil), // 0: file.File
(*EditedFile)(nil), // 1: file.EditedFile
}
var file_file_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_file_proto_init() }
func file_file_proto_init() {
if File_file_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_file_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*File); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_file_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EditedFile); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_file_proto_msgTypes[1].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_file_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_file_proto_goTypes,
DependencyIndexes: file_file_proto_depIdxs,
MessageInfos: file_file_proto_msgTypes,
}.Build()
File_file_proto = out.File
file_file_proto_rawDesc = nil
file_file_proto_goTypes = nil
file_file_proto_depIdxs = nil
}

19
entity/file.proto Normal file
View File

@@ -0,0 +1,19 @@
syntax = "proto3";

package file;

option go_package = "github.com/abc950309/tapewriter/entity";

// File is a library file entry. Field numbers 1-3 are identity fields;
// the jump to 17+ leaves room for future identity fields — presumably
// intentional; confirm before reusing numbers 4-16.
message File {
  int64 id = 1;
  int64 parent_id = 2;
  string name = 3;
  int64 mode = 17;     // file mode bits (converted from Go fs.FileMode by the API layer)
  int64 mod_time = 18; // modification time, unix seconds
  int64 size = 19;     // size in bytes
  bytes hash = 20;     // content hash; algorithm not specified here — see library package
}

// EditedFile carries the mutable subset of File for update calls; unset
// optional fields mean "leave unchanged".
message EditedFile {
  optional int64 parent_id = 2;
  optional string name = 3;
}

32
entity/job.go Normal file
View File

@@ -0,0 +1,32 @@
package entity
import (
"database/sql"
"database/sql/driver"
)
// Compile-time interface checks: *JobParam and *JobState must implement both
// sql.Scanner (reading a DB column) and driver.Valuer (writing one). The
// typed-nil form asserts this without allocating anything at package init,
// unlike the previous `sql.Scanner(&JobParam{})` conversions, which built a
// throwaway value per check.
var (
	_ sql.Scanner   = (*JobParam)(nil)
	_ driver.Valuer = (*JobParam)(nil)
	_ sql.Scanner   = (*JobState)(nil)
	_ driver.Valuer = (*JobState)(nil)
)

// Scan implements sql.Scanner, decoding a database column value into x via
// the package-level Scan helper (defined elsewhere in this package).
func (x *JobParam) Scan(src any) error {
	return Scan(x, src)
}

// Value implements driver.Valuer, encoding x into a driver-storable value
// via the package-level Value helper.
func (x *JobParam) Value() (driver.Value, error) {
	return Value(x)
}

// Scan implements sql.Scanner for JobState; see (*JobParam).Scan.
func (x *JobState) Scan(src any) error {
	return Scan(x, src)
}

// Value implements driver.Valuer for JobState; see (*JobParam).Value.
func (x *JobState) Value() (driver.Value, error) {
	return Value(x)
}

782
entity/job.pb.go Normal file
View File

@@ -0,0 +1,782 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: job.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type JobStatus int32
const (
JobStatus_Draft JobStatus = 0
JobStatus_NotReady JobStatus = 1 // dependencies not satisfied
JobStatus_Pending JobStatus = 2 // waiting in queue
JobStatus_Processing JobStatus = 3
JobStatus_Completed JobStatus = 4
JobStatus_Failed JobStatus = 255
)
// Enum value maps for JobStatus.
var (
JobStatus_name = map[int32]string{
0: "Draft",
1: "NotReady",
2: "Pending",
3: "Processing",
4: "Completed",
255: "Failed",
}
JobStatus_value = map[string]int32{
"Draft": 0,
"NotReady": 1,
"Pending": 2,
"Processing": 3,
"Completed": 4,
"Failed": 255,
}
)
func (x JobStatus) Enum() *JobStatus {
p := new(JobStatus)
*p = x
return p
}
func (x JobStatus) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (JobStatus) Descriptor() protoreflect.EnumDescriptor {
return file_job_proto_enumTypes[0].Descriptor()
}
func (JobStatus) Type() protoreflect.EnumType {
return &file_job_proto_enumTypes[0]
}
func (x JobStatus) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use JobStatus.Descriptor instead.
func (JobStatus) EnumDescriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{0}
}
type Job struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
Status JobStatus `protobuf:"varint,2,opt,name=status,proto3,enum=job.JobStatus" json:"status,omitempty"`
Priority int64 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"`
CreateTime int64 `protobuf:"varint,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
UpdateTime int64 `protobuf:"varint,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"`
State *JobState `protobuf:"bytes,17,opt,name=state,proto3" json:"state,omitempty"`
}
func (x *Job) Reset() {
*x = Job{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Job) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Job) ProtoMessage() {}
func (x *Job) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Job.ProtoReflect.Descriptor instead.
func (*Job) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{0}
}
func (x *Job) GetId() int64 {
if x != nil {
return x.Id
}
return 0
}
func (x *Job) GetStatus() JobStatus {
if x != nil {
return x.Status
}
return JobStatus_Draft
}
func (x *Job) GetPriority() int64 {
if x != nil {
return x.Priority
}
return 0
}
func (x *Job) GetCreateTime() int64 {
if x != nil {
return x.CreateTime
}
return 0
}
func (x *Job) GetUpdateTime() int64 {
if x != nil {
return x.UpdateTime
}
return 0
}
func (x *Job) GetState() *JobState {
if x != nil {
return x.State
}
return nil
}
type JobParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to Param:
// *JobParam_Archive
Param isJobParam_Param `protobuf_oneof:"param"`
}
func (x *JobParam) Reset() {
*x = JobParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobParam) ProtoMessage() {}
func (x *JobParam) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobParam.ProtoReflect.Descriptor instead.
func (*JobParam) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{1}
}
func (m *JobParam) GetParam() isJobParam_Param {
if m != nil {
return m.Param
}
return nil
}
func (x *JobParam) GetArchive() *JobParamArchive {
if x, ok := x.GetParam().(*JobParam_Archive); ok {
return x.Archive
}
return nil
}
type isJobParam_Param interface {
isJobParam_Param()
}
type JobParam_Archive struct {
Archive *JobParamArchive `protobuf:"bytes,1,opt,name=Archive,proto3,oneof"`
}
func (*JobParam_Archive) isJobParam_Param() {}
type JobState struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to State:
// *JobState_Archive
State isJobState_State `protobuf_oneof:"state"`
}
func (x *JobState) Reset() {
*x = JobState{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobState) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobState) ProtoMessage() {}
func (x *JobState) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobState.ProtoReflect.Descriptor instead.
func (*JobState) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{2}
}
func (m *JobState) GetState() isJobState_State {
if m != nil {
return m.State
}
return nil
}
func (x *JobState) GetArchive() *JobStateArchive {
if x, ok := x.GetState().(*JobState_Archive); ok {
return x.Archive
}
return nil
}
type isJobState_State interface {
isJobState_State()
}
type JobState_Archive struct {
Archive *JobStateArchive `protobuf:"bytes,1,opt,name=Archive,proto3,oneof"`
}
func (*JobState_Archive) isJobState_State() {}
type JobNextParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to Param:
// *JobNextParam_Archive
Param isJobNextParam_Param `protobuf_oneof:"param"`
}
func (x *JobNextParam) Reset() {
*x = JobNextParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobNextParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobNextParam) ProtoMessage() {}
func (x *JobNextParam) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobNextParam.ProtoReflect.Descriptor instead.
func (*JobNextParam) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{3}
}
func (m *JobNextParam) GetParam() isJobNextParam_Param {
if m != nil {
return m.Param
}
return nil
}
func (x *JobNextParam) GetArchive() *JobArchiveNextParam {
if x, ok := x.GetParam().(*JobNextParam_Archive); ok {
return x.Archive
}
return nil
}
type isJobNextParam_Param interface {
isJobNextParam_Param()
}
type JobNextParam_Archive struct {
Archive *JobArchiveNextParam `protobuf:"bytes,1,opt,name=archive,proto3,oneof"`
}
func (*JobNextParam_Archive) isJobNextParam_Param() {}
type CreatableJob struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Priority int64 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"`
Param *JobParam `protobuf:"bytes,17,opt,name=param,proto3" json:"param,omitempty"`
}
func (x *CreatableJob) Reset() {
*x = CreatableJob{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CreatableJob) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CreatableJob) ProtoMessage() {}
func (x *CreatableJob) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CreatableJob.ProtoReflect.Descriptor instead.
func (*CreatableJob) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{4}
}
func (x *CreatableJob) GetPriority() int64 {
if x != nil {
return x.Priority
}
return 0
}
func (x *CreatableJob) GetParam() *JobParam {
if x != nil {
return x.Param
}
return nil
}
type JobFilter struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Status *JobStatus `protobuf:"varint,1,opt,name=status,proto3,enum=job.JobStatus,oneof" json:"status,omitempty"`
Limit *int64 `protobuf:"varint,33,opt,name=limit,proto3,oneof" json:"limit,omitempty"`
Offset *int64 `protobuf:"varint,34,opt,name=offset,proto3,oneof" json:"offset,omitempty"`
}
func (x *JobFilter) Reset() {
*x = JobFilter{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobFilter) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobFilter) ProtoMessage() {}
func (x *JobFilter) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobFilter.ProtoReflect.Descriptor instead.
func (*JobFilter) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{5}
}
func (x *JobFilter) GetStatus() JobStatus {
if x != nil && x.Status != nil {
return *x.Status
}
return JobStatus_Draft
}
func (x *JobFilter) GetLimit() int64 {
if x != nil && x.Limit != nil {
return *x.Limit
}
return 0
}
func (x *JobFilter) GetOffset() int64 {
if x != nil && x.Offset != nil {
return *x.Offset
}
return 0
}
type JobDisplay struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to Display:
// *JobDisplay_Archive
Display isJobDisplay_Display `protobuf_oneof:"display"`
}
func (x *JobDisplay) Reset() {
*x = JobDisplay{}
if protoimpl.UnsafeEnabled {
mi := &file_job_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobDisplay) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobDisplay) ProtoMessage() {}
func (x *JobDisplay) ProtoReflect() protoreflect.Message {
mi := &file_job_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobDisplay.ProtoReflect.Descriptor instead.
func (*JobDisplay) Descriptor() ([]byte, []int) {
return file_job_proto_rawDescGZIP(), []int{6}
}
func (m *JobDisplay) GetDisplay() isJobDisplay_Display {
if m != nil {
return m.Display
}
return nil
}
func (x *JobDisplay) GetArchive() *JobDisplayArchive {
if x, ok := x.GetDisplay().(*JobDisplay_Archive); ok {
return x.Archive
}
return nil
}
type isJobDisplay_Display interface {
isJobDisplay_Display()
}
type JobDisplay_Archive struct {
Archive *JobDisplayArchive `protobuf:"bytes,1,opt,name=archive,proto3,oneof"`
}
func (*JobDisplay_Archive) isJobDisplay_Display() {}
var File_job_proto protoreflect.FileDescriptor
var file_job_proto_rawDesc = []byte{
0x0a, 0x09, 0x6a, 0x6f, 0x62, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x6a, 0x6f, 0x62,
0x1a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x22, 0xc0, 0x01, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x73,
0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6a, 0x6f,
0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61,
0x74, 0x75, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18,
0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12,
0x1f, 0x0a, 0x0b, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04,
0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65,
0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18,
0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d,
0x65, 0x12, 0x23, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x0d, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52,
0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x4d, 0x0a, 0x08, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72,
0x61, 0x6d, 0x12, 0x38, 0x0a, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x48, 0x00, 0x52, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x07, 0x0a, 0x05,
0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x4d, 0x0a, 0x08, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74,
0x65, 0x12, 0x38, 0x0a, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x48, 0x00, 0x52, 0x07, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x73,
0x74, 0x61, 0x74, 0x65, 0x22, 0x55, 0x0a, 0x0c, 0x4a, 0x6f, 0x62, 0x4e, 0x65, 0x78, 0x74, 0x50,
0x61, 0x72, 0x61, 0x6d, 0x12, 0x3c, 0x0a, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68,
0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x4e, 0x65,
0x78, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69,
0x76, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x4f, 0x0a, 0x0c, 0x43,
0x72, 0x65, 0x61, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x0a, 0x08, 0x70,
0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x70,
0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d,
0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x6a, 0x6f, 0x62, 0x2e, 0x4a, 0x6f, 0x62,
0x50, 0x61, 0x72, 0x61, 0x6d, 0x52, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x90, 0x01, 0x0a,
0x09, 0x4a, 0x6f, 0x62, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74,
0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x6a, 0x6f, 0x62,
0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74,
0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74,
0x18, 0x21, 0x20, 0x01, 0x28, 0x03, 0x48, 0x01, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x88,
0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x22, 0x20, 0x01,
0x28, 0x03, 0x48, 0x02, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x88, 0x01, 0x01, 0x42,
0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c,
0x69, 0x6d, 0x69, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x22,
0x53, 0x0a, 0x0a, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x12, 0x3a, 0x0a,
0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e,
0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62,
0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x48, 0x00,
0x52, 0x07, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x64, 0x69, 0x73,
0x70, 0x6c, 0x61, 0x79, 0x2a, 0x5d, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75,
0x73, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x72, 0x61, 0x66, 0x74, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08,
0x4e, 0x6f, 0x74, 0x52, 0x65, 0x61, 0x64, 0x79, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65,
0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x72, 0x6f, 0x63, 0x65,
0x73, 0x73, 0x69, 0x6e, 0x67, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x6f, 0x6d, 0x70, 0x6c,
0x65, 0x74, 0x65, 0x64, 0x10, 0x04, 0x12, 0x0b, 0x0a, 0x06, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64,
0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65,
0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_job_proto_rawDescOnce sync.Once
file_job_proto_rawDescData = file_job_proto_rawDesc
)
func file_job_proto_rawDescGZIP() []byte {
file_job_proto_rawDescOnce.Do(func() {
file_job_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_proto_rawDescData)
})
return file_job_proto_rawDescData
}
var file_job_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_job_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_job_proto_goTypes = []interface{}{
(JobStatus)(0), // 0: job.JobStatus
(*Job)(nil), // 1: job.Job
(*JobParam)(nil), // 2: job.JobParam
(*JobState)(nil), // 3: job.JobState
(*JobNextParam)(nil), // 4: job.JobNextParam
(*CreatableJob)(nil), // 5: job.CreatableJob
(*JobFilter)(nil), // 6: job.JobFilter
(*JobDisplay)(nil), // 7: job.JobDisplay
(*JobParamArchive)(nil), // 8: job_archive.JobParamArchive
(*JobStateArchive)(nil), // 9: job_archive.JobStateArchive
(*JobArchiveNextParam)(nil), // 10: job_archive.JobArchiveNextParam
(*JobDisplayArchive)(nil), // 11: job_archive.JobDisplayArchive
}
var file_job_proto_depIdxs = []int32{
0, // 0: job.Job.status:type_name -> job.JobStatus
3, // 1: job.Job.state:type_name -> job.JobState
8, // 2: job.JobParam.Archive:type_name -> job_archive.JobParamArchive
9, // 3: job.JobState.Archive:type_name -> job_archive.JobStateArchive
10, // 4: job.JobNextParam.archive:type_name -> job_archive.JobArchiveNextParam
2, // 5: job.CreatableJob.param:type_name -> job.JobParam
0, // 6: job.JobFilter.status:type_name -> job.JobStatus
11, // 7: job.JobDisplay.archive:type_name -> job_archive.JobDisplayArchive
8, // [8:8] is the sub-list for method output_type
8, // [8:8] is the sub-list for method input_type
8, // [8:8] is the sub-list for extension type_name
8, // [8:8] is the sub-list for extension extendee
0, // [0:8] is the sub-list for field type_name
}
func init() { file_job_proto_init() }
func file_job_proto_init() {
if File_job_proto != nil {
return
}
file_job_archive_proto_init()
if !protoimpl.UnsafeEnabled {
file_job_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Job); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobState); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobNextParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreatableJob); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobFilter); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobDisplay); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_job_proto_msgTypes[1].OneofWrappers = []interface{}{
(*JobParam_Archive)(nil),
}
file_job_proto_msgTypes[2].OneofWrappers = []interface{}{
(*JobState_Archive)(nil),
}
file_job_proto_msgTypes[3].OneofWrappers = []interface{}{
(*JobNextParam_Archive)(nil),
}
file_job_proto_msgTypes[5].OneofWrappers = []interface{}{}
file_job_proto_msgTypes[6].OneofWrappers = []interface{}{
(*JobDisplay_Archive)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_job_proto_rawDesc,
NumEnums: 1,
NumMessages: 7,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_job_proto_goTypes,
DependencyIndexes: file_job_proto_depIdxs,
EnumInfos: file_job_proto_enumTypes,
MessageInfos: file_job_proto_msgTypes,
}.Build()
File_job_proto = out.File
file_job_proto_rawDesc = nil
file_job_proto_goTypes = nil
file_job_proto_depIdxs = nil
}

61
entity/job.proto Normal file
View File

@@ -0,0 +1,61 @@
syntax = "proto3";
package job;
option go_package = "github.com/abc950309/tapewriter/entity";
import "job_archive.proto";
// JobStatus is the lifecycle state shared by all job types.
enum JobStatus {
Draft = 0;
NotReady = 1; // dependencies not satisfied
Pending = 2; // waiting in queue
Processing = 3;
Completed = 4;
// Terminal error state; 255 leaves 5..254 free for future non-error states.
Failed = 255;
}
// Job is a persisted unit of work tracked by the service.
message Job {
int64 id = 1;
JobStatus status = 2;
// presumably higher values are scheduled first — confirm against the executor
int64 priority = 3;
// presumably Unix timestamps in seconds, matching the int64 second fields
// used elsewhere in this repo (e.g. File.mod_time) — TODO confirm
int64 create_time = 4;
int64 update_time = 5;
// Type-specific state (oneof wrapper); field 17 leaves 6..16 free for
// future common fields.
JobState state = 17;
}
// JobParam is the creation-time parameter, one variant per job type.
message JobParam {
oneof param {
// NOTE(review): "Archive" is PascalCase; proto style is lower_snake_case
// (compare JobNextParam.archive). Renaming changes the generated Go/JSON
// names, so decide before the API is consumed externally.
job_archive.JobParamArchive Archive = 1;
}
}
// JobState is the persisted per-job state, one variant per job type.
message JobState {
oneof state {
// NOTE(review): PascalCase field name — see the note on JobParam.Archive.
job_archive.JobStateArchive Archive = 1;
}
}
// JobNextParam carries the caller-supplied input for advancing a job to its
// next step (e.g. which tape device to use).
message JobNextParam {
oneof param {
job_archive.JobArchiveNextParam archive = 1;
}
}
// CreatableJob is the client-facing payload for creating a Job; the server
// assigns id, status and timestamps. Field numbers (3, 17) appear to mirror
// the corresponding Job fields on purpose — confirm before renumbering.
message CreatableJob {
int64 priority = 3;
JobParam param = 17;
}
// JobFilter selects jobs for list queries; an unset field means "no constraint".
message JobFilter {
optional JobStatus status = 1;
// Pagination; the high field numbers leave 2..32 free for future criteria.
optional int64 limit = 33;
optional int64 offset = 34;
}
// JobDisplay is the live, type-specific progress view of a running job.
message JobDisplay {
oneof display {
job_archive.JobDisplayArchive archive = 1;
}
}

708
entity/job_archive.pb.go Normal file
View File

@@ -0,0 +1,708 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: job_archive.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type JobArchiveStep int32
const (
JobArchiveStep_Pending JobArchiveStep = 0
JobArchiveStep_WaitForTape JobArchiveStep = 1
JobArchiveStep_Copying JobArchiveStep = 2
JobArchiveStep_Finished JobArchiveStep = 255
)
// Enum value maps for JobArchiveStep.
var (
JobArchiveStep_name = map[int32]string{
0: "Pending",
1: "WaitForTape",
2: "Copying",
255: "Finished",
}
JobArchiveStep_value = map[string]int32{
"Pending": 0,
"WaitForTape": 1,
"Copying": 2,
"Finished": 255,
}
)
func (x JobArchiveStep) Enum() *JobArchiveStep {
p := new(JobArchiveStep)
*p = x
return p
}
func (x JobArchiveStep) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (JobArchiveStep) Descriptor() protoreflect.EnumDescriptor {
return file_job_archive_proto_enumTypes[0].Descriptor()
}
func (JobArchiveStep) Type() protoreflect.EnumType {
return &file_job_archive_proto_enumTypes[0]
}
func (x JobArchiveStep) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use JobArchiveStep.Descriptor instead.
func (JobArchiveStep) EnumDescriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{0}
}
type JobParamArchive struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Sources []*Source `protobuf:"bytes,1,rep,name=sources,proto3" json:"sources,omitempty"`
}
func (x *JobParamArchive) Reset() {
*x = JobParamArchive{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobParamArchive) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobParamArchive) ProtoMessage() {}
func (x *JobParamArchive) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobParamArchive.ProtoReflect.Descriptor instead.
func (*JobParamArchive) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{0}
}
func (x *JobParamArchive) GetSources() []*Source {
if x != nil {
return x.Sources
}
return nil
}
type JobArchiveNextParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to Param:
// *JobArchiveNextParam_WaitForTape
// *JobArchiveNextParam_Copying
// *JobArchiveNextParam_Finished
Param isJobArchiveNextParam_Param `protobuf_oneof:"param"`
}
func (x *JobArchiveNextParam) Reset() {
*x = JobArchiveNextParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobArchiveNextParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobArchiveNextParam) ProtoMessage() {}
func (x *JobArchiveNextParam) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobArchiveNextParam.ProtoReflect.Descriptor instead.
func (*JobArchiveNextParam) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{1}
}
func (m *JobArchiveNextParam) GetParam() isJobArchiveNextParam_Param {
if m != nil {
return m.Param
}
return nil
}
func (x *JobArchiveNextParam) GetWaitForTape() *JobArchiveWaitForTapeParam {
if x, ok := x.GetParam().(*JobArchiveNextParam_WaitForTape); ok {
return x.WaitForTape
}
return nil
}
func (x *JobArchiveNextParam) GetCopying() *JobArchiveCopyingParam {
if x, ok := x.GetParam().(*JobArchiveNextParam_Copying); ok {
return x.Copying
}
return nil
}
func (x *JobArchiveNextParam) GetFinished() *JobArchiveFinishedParam {
if x, ok := x.GetParam().(*JobArchiveNextParam_Finished); ok {
return x.Finished
}
return nil
}
type isJobArchiveNextParam_Param interface {
isJobArchiveNextParam_Param()
}
type JobArchiveNextParam_WaitForTape struct {
WaitForTape *JobArchiveWaitForTapeParam `protobuf:"bytes,1,opt,name=WaitForTape,proto3,oneof"`
}
type JobArchiveNextParam_Copying struct {
Copying *JobArchiveCopyingParam `protobuf:"bytes,2,opt,name=Copying,proto3,oneof"`
}
type JobArchiveNextParam_Finished struct {
Finished *JobArchiveFinishedParam `protobuf:"bytes,255,opt,name=Finished,proto3,oneof"`
}
func (*JobArchiveNextParam_WaitForTape) isJobArchiveNextParam_Param() {}
func (*JobArchiveNextParam_Copying) isJobArchiveNextParam_Param() {}
func (*JobArchiveNextParam_Finished) isJobArchiveNextParam_Param() {}
type JobArchiveWaitForTapeParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *JobArchiveWaitForTapeParam) Reset() {
*x = JobArchiveWaitForTapeParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobArchiveWaitForTapeParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobArchiveWaitForTapeParam) ProtoMessage() {}
func (x *JobArchiveWaitForTapeParam) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobArchiveWaitForTapeParam.ProtoReflect.Descriptor instead.
func (*JobArchiveWaitForTapeParam) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{2}
}
type JobArchiveCopyingParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Device string `protobuf:"bytes,1,opt,name=device,proto3" json:"device,omitempty"`
Barcode string `protobuf:"bytes,2,opt,name=barcode,proto3" json:"barcode,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
}
func (x *JobArchiveCopyingParam) Reset() {
*x = JobArchiveCopyingParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobArchiveCopyingParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobArchiveCopyingParam) ProtoMessage() {}
func (x *JobArchiveCopyingParam) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobArchiveCopyingParam.ProtoReflect.Descriptor instead.
func (*JobArchiveCopyingParam) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{3}
}
func (x *JobArchiveCopyingParam) GetDevice() string {
if x != nil {
return x.Device
}
return ""
}
func (x *JobArchiveCopyingParam) GetBarcode() string {
if x != nil {
return x.Barcode
}
return ""
}
func (x *JobArchiveCopyingParam) GetName() string {
if x != nil {
return x.Name
}
return ""
}
type JobArchiveFinishedParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *JobArchiveFinishedParam) Reset() {
*x = JobArchiveFinishedParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobArchiveFinishedParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobArchiveFinishedParam) ProtoMessage() {}
func (x *JobArchiveFinishedParam) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobArchiveFinishedParam.ProtoReflect.Descriptor instead.
func (*JobArchiveFinishedParam) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{4}
}
type JobStateArchive struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Step JobArchiveStep `protobuf:"varint,1,opt,name=step,proto3,enum=job_archive.JobArchiveStep" json:"step,omitempty"`
Sources []*SourceState `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"`
}
func (x *JobStateArchive) Reset() {
*x = JobStateArchive{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobStateArchive) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobStateArchive) ProtoMessage() {}
func (x *JobStateArchive) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobStateArchive.ProtoReflect.Descriptor instead.
func (*JobStateArchive) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{5}
}
func (x *JobStateArchive) GetStep() JobArchiveStep {
if x != nil {
return x.Step
}
return JobArchiveStep_Pending
}
func (x *JobStateArchive) GetSources() []*SourceState {
if x != nil {
return x.Sources
}
return nil
}
type JobDisplayArchive struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
CopyedBytes int64 `protobuf:"varint,1,opt,name=copyedBytes,proto3" json:"copyedBytes,omitempty"`
CopyedFiles int64 `protobuf:"varint,2,opt,name=copyedFiles,proto3" json:"copyedFiles,omitempty"`
TotalBytes int64 `protobuf:"varint,3,opt,name=totalBytes,proto3" json:"totalBytes,omitempty"`
TotalFiles int64 `protobuf:"varint,4,opt,name=totalFiles,proto3" json:"totalFiles,omitempty"`
Speed int64 `protobuf:"varint,5,opt,name=speed,proto3" json:"speed,omitempty"`
}
func (x *JobDisplayArchive) Reset() {
*x = JobDisplayArchive{}
if protoimpl.UnsafeEnabled {
mi := &file_job_archive_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobDisplayArchive) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobDisplayArchive) ProtoMessage() {}
func (x *JobDisplayArchive) ProtoReflect() protoreflect.Message {
mi := &file_job_archive_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobDisplayArchive.ProtoReflect.Descriptor instead.
func (*JobDisplayArchive) Descriptor() ([]byte, []int) {
return file_job_archive_proto_rawDescGZIP(), []int{6}
}
func (x *JobDisplayArchive) GetCopyedBytes() int64 {
if x != nil {
return x.CopyedBytes
}
return 0
}
func (x *JobDisplayArchive) GetCopyedFiles() int64 {
if x != nil {
return x.CopyedFiles
}
return 0
}
func (x *JobDisplayArchive) GetTotalBytes() int64 {
if x != nil {
return x.TotalBytes
}
return 0
}
func (x *JobDisplayArchive) GetTotalFiles() int64 {
if x != nil {
return x.TotalFiles
}
return 0
}
func (x *JobDisplayArchive) GetSpeed() int64 {
if x != nil {
return x.Speed
}
return 0
}
var File_job_archive_proto protoreflect.FileDescriptor
var file_job_archive_proto_rawDesc = []byte{
0x0a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x1a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x3b,
0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x12, 0x28, 0x0a, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x22, 0xf1, 0x01, 0x0a, 0x13,
0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x4e, 0x65, 0x78, 0x74, 0x50, 0x61,
0x72, 0x61, 0x6d, 0x12, 0x4b, 0x0a, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61,
0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61,
0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61,
0x6d, 0x48, 0x00, 0x52, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65,
0x12, 0x3f, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x23, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e,
0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e,
0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e,
0x67, 0x12, 0x43, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x18, 0xff, 0x01,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69,
0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6e,
0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x08, 0x46, 0x69,
0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x22,
0x1c, 0x0a, 0x1a, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x57, 0x61, 0x69,
0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x5e, 0x0a,
0x16, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x69,
0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x65, 0x76, 0x69, 0x63,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x12,
0x18, 0x0a, 0x07, 0x62, 0x61, 0x72, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x52, 0x07, 0x62, 0x61, 0x72, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x19, 0x0a,
0x17, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73,
0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x71, 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x53,
0x74, 0x61, 0x74, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x73,
0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x6a, 0x6f, 0x62, 0x5f,
0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69,
0x76, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x2d, 0x0a, 0x07,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61,
0x74, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x22, 0xad, 0x01, 0x0a, 0x11,
0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73,
0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79,
0x74, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c,
0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64,
0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79,
0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c,
0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69,
0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c,
0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x70, 0x65, 0x65, 0x64, 0x18, 0x05,
0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x73, 0x70, 0x65, 0x65, 0x64, 0x2a, 0x4a, 0x0a, 0x0e, 0x4a,
0x6f, 0x62, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x53, 0x74, 0x65, 0x70, 0x12, 0x0b, 0x0a,
0x07, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x57, 0x61,
0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43,
0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69,
0x73, 0x68, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75,
0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f,
0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74,
0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_job_archive_proto_rawDescOnce sync.Once
file_job_archive_proto_rawDescData = file_job_archive_proto_rawDesc
)
func file_job_archive_proto_rawDescGZIP() []byte {
file_job_archive_proto_rawDescOnce.Do(func() {
file_job_archive_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_archive_proto_rawDescData)
})
return file_job_archive_proto_rawDescData
}
var file_job_archive_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_job_archive_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_job_archive_proto_goTypes = []interface{}{
(JobArchiveStep)(0), // 0: job_archive.JobArchiveStep
(*JobParamArchive)(nil), // 1: job_archive.JobParamArchive
(*JobArchiveNextParam)(nil), // 2: job_archive.JobArchiveNextParam
(*JobArchiveWaitForTapeParam)(nil), // 3: job_archive.JobArchiveWaitForTapeParam
(*JobArchiveCopyingParam)(nil), // 4: job_archive.JobArchiveCopyingParam
(*JobArchiveFinishedParam)(nil), // 5: job_archive.JobArchiveFinishedParam
(*JobStateArchive)(nil), // 6: job_archive.JobStateArchive
(*JobDisplayArchive)(nil), // 7: job_archive.JobDisplayArchive
(*Source)(nil), // 8: source.Source
(*SourceState)(nil), // 9: source.SourceState
}
var file_job_archive_proto_depIdxs = []int32{
8, // 0: job_archive.JobParamArchive.sources:type_name -> source.Source
3, // 1: job_archive.JobArchiveNextParam.WaitForTape:type_name -> job_archive.JobArchiveWaitForTapeParam
4, // 2: job_archive.JobArchiveNextParam.Copying:type_name -> job_archive.JobArchiveCopyingParam
5, // 3: job_archive.JobArchiveNextParam.Finished:type_name -> job_archive.JobArchiveFinishedParam
0, // 4: job_archive.JobStateArchive.step:type_name -> job_archive.JobArchiveStep
9, // 5: job_archive.JobStateArchive.sources:type_name -> source.SourceState
6, // [6:6] is the sub-list for method output_type
6, // [6:6] is the sub-list for method input_type
6, // [6:6] is the sub-list for extension type_name
6, // [6:6] is the sub-list for extension extendee
0, // [0:6] is the sub-list for field type_name
}
func init() { file_job_archive_proto_init() }
func file_job_archive_proto_init() {
if File_job_archive_proto != nil {
return
}
file_source_proto_init()
if !protoimpl.UnsafeEnabled {
file_job_archive_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobParamArchive); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobArchiveNextParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobArchiveWaitForTapeParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobArchiveCopyingParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobArchiveFinishedParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobStateArchive); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_archive_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobDisplayArchive); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_job_archive_proto_msgTypes[1].OneofWrappers = []interface{}{
(*JobArchiveNextParam_WaitForTape)(nil),
(*JobArchiveNextParam_Copying)(nil),
(*JobArchiveNextParam_Finished)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_job_archive_proto_rawDesc,
NumEnums: 1,
NumMessages: 7,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_job_archive_proto_goTypes,
DependencyIndexes: file_job_archive_proto_depIdxs,
EnumInfos: file_job_archive_proto_enumTypes,
MessageInfos: file_job_archive_proto_msgTypes,
}.Build()
File_job_archive_proto = out.File
file_job_archive_proto_rawDesc = nil
file_job_archive_proto_goTypes = nil
file_job_archive_proto_depIdxs = nil
}

49
entity/job_archive.proto Normal file
View File

@@ -0,0 +1,49 @@
syntax = "proto3";
package job_archive;
option go_package = "github.com/abc950309/tapewriter/entity";
import "source.proto";
// JobArchiveStep enumerates the phases of an archive job.
enum JobArchiveStep {
Pending = 0;
WaitForTape = 1; // blocked until a tape/device is chosen (see JobArchiveCopyingParam)
Copying = 2;
Finished = 255; // terminal; the gap leaves 3..254 free for intermediate steps
}
// JobParamArchive holds the creation parameters for an archive job:
// the source trees to be copied.
message JobParamArchive {
repeated source.Source sources = 1;
}
// JobArchiveNextParam selects the step to advance an archive job to; the
// field numbers (1, 2, 255) deliberately mirror the JobArchiveStep values.
message JobArchiveNextParam {
oneof param {
// NOTE(review): PascalCase field names; proto style is lower_snake_case.
// Renaming changes the generated Go/JSON names — decide early.
JobArchiveWaitForTapeParam WaitForTape = 1;
JobArchiveCopyingParam Copying = 2;
JobArchiveFinishedParam Finished = 255;
}
}
// JobArchiveWaitForTapeParam is an empty marker: "advance to WaitForTape".
message JobArchiveWaitForTapeParam {}
// JobArchiveCopyingParam tells the job which drive and tape to copy onto.
message JobArchiveCopyingParam {
string device = 1; // tape device path, presumably e.g. /dev/nst0 — TODO confirm
string barcode = 2; // tape barcode label
string name = 3; // human-readable tape name
}
// JobArchiveFinishedParam is an empty marker: "advance to Finished".
message JobArchiveFinishedParam {}
// JobStateArchive is the persisted state of an archive job: the current step
// plus per-source copy progress.
message JobStateArchive {
JobArchiveStep step = 1;
repeated source.SourceState sources = 2;
}
// JobDisplayArchive is the live progress view of an archive job.
// NOTE(review): "copyed" is a typo for "copied", and the field names are
// camelCase where proto style is lower_snake_case; renaming changes the
// generated Go/JSON names, so fix before external consumers depend on them.
message JobDisplayArchive {
int64 copyedBytes = 1;
int64 copyedFiles = 2;
int64 totalBytes = 3;
int64 totalFiles = 4;
// NOTE(review): declared optional here, but the checked-in job_archive.pb.go
// in this same commit has a plain (non-pointer) Speed int64 field — the
// generated code looks stale relative to this proto; regenerate with protoc.
// Unit is presumably bytes per second — TODO confirm.
optional int64 speed = 5;
}

791
entity/job_restore.pb.go Normal file
View File

@@ -0,0 +1,791 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: job_restore.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type JobRestoreStep int32
const (
JobRestoreStep_Pending JobRestoreStep = 0
JobRestoreStep_WaitForTape JobRestoreStep = 1
JobRestoreStep_Copying JobRestoreStep = 2
JobRestoreStep_Finished JobRestoreStep = 255
)
// Enum value maps for JobRestoreStep.
var (
JobRestoreStep_name = map[int32]string{
0: "Pending",
1: "WaitForTape",
2: "Copying",
255: "Finished",
}
JobRestoreStep_value = map[string]int32{
"Pending": 0,
"WaitForTape": 1,
"Copying": 2,
"Finished": 255,
}
)
func (x JobRestoreStep) Enum() *JobRestoreStep {
p := new(JobRestoreStep)
*p = x
return p
}
func (x JobRestoreStep) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (JobRestoreStep) Descriptor() protoreflect.EnumDescriptor {
return file_job_restore_proto_enumTypes[0].Descriptor()
}
func (JobRestoreStep) Type() protoreflect.EnumType {
return &file_job_restore_proto_enumTypes[0]
}
func (x JobRestoreStep) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use JobRestoreStep.Descriptor instead.
func (JobRestoreStep) EnumDescriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{0}
}
type JobParamRestore struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
FileIds []int64 `protobuf:"varint,1,rep,packed,name=file_ids,json=fileIds,proto3" json:"file_ids,omitempty"`
}
func (x *JobParamRestore) Reset() {
*x = JobParamRestore{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobParamRestore) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobParamRestore) ProtoMessage() {}
func (x *JobParamRestore) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobParamRestore.ProtoReflect.Descriptor instead.
func (*JobParamRestore) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{0}
}
func (x *JobParamRestore) GetFileIds() []int64 {
if x != nil {
return x.FileIds
}
return nil
}
type JobRestoreNextParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to Param:
// *JobRestoreNextParam_WaitForTape
// *JobRestoreNextParam_Copying
// *JobRestoreNextParam_Finished
Param isJobRestoreNextParam_Param `protobuf_oneof:"param"`
}
func (x *JobRestoreNextParam) Reset() {
*x = JobRestoreNextParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobRestoreNextParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobRestoreNextParam) ProtoMessage() {}
func (x *JobRestoreNextParam) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobRestoreNextParam.ProtoReflect.Descriptor instead.
func (*JobRestoreNextParam) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{1}
}
func (m *JobRestoreNextParam) GetParam() isJobRestoreNextParam_Param {
if m != nil {
return m.Param
}
return nil
}
func (x *JobRestoreNextParam) GetWaitForTape() *JobRestoreWaitForTapeParam {
if x, ok := x.GetParam().(*JobRestoreNextParam_WaitForTape); ok {
return x.WaitForTape
}
return nil
}
func (x *JobRestoreNextParam) GetCopying() *JobRestoreCopyingParam {
if x, ok := x.GetParam().(*JobRestoreNextParam_Copying); ok {
return x.Copying
}
return nil
}
func (x *JobRestoreNextParam) GetFinished() *JobRestoreFinishedParam {
if x, ok := x.GetParam().(*JobRestoreNextParam_Finished); ok {
return x.Finished
}
return nil
}
type isJobRestoreNextParam_Param interface {
isJobRestoreNextParam_Param()
}
type JobRestoreNextParam_WaitForTape struct {
WaitForTape *JobRestoreWaitForTapeParam `protobuf:"bytes,1,opt,name=WaitForTape,proto3,oneof"`
}
type JobRestoreNextParam_Copying struct {
Copying *JobRestoreCopyingParam `protobuf:"bytes,2,opt,name=Copying,proto3,oneof"`
}
type JobRestoreNextParam_Finished struct {
Finished *JobRestoreFinishedParam `protobuf:"bytes,255,opt,name=Finished,proto3,oneof"`
}
func (*JobRestoreNextParam_WaitForTape) isJobRestoreNextParam_Param() {}
func (*JobRestoreNextParam_Copying) isJobRestoreNextParam_Param() {}
func (*JobRestoreNextParam_Finished) isJobRestoreNextParam_Param() {}
type JobRestoreWaitForTapeParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *JobRestoreWaitForTapeParam) Reset() {
*x = JobRestoreWaitForTapeParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobRestoreWaitForTapeParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobRestoreWaitForTapeParam) ProtoMessage() {}
func (x *JobRestoreWaitForTapeParam) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobRestoreWaitForTapeParam.ProtoReflect.Descriptor instead.
func (*JobRestoreWaitForTapeParam) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{2}
}
type JobRestoreCopyingParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Device string `protobuf:"bytes,1,opt,name=device,proto3" json:"device,omitempty"`
}
func (x *JobRestoreCopyingParam) Reset() {
*x = JobRestoreCopyingParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobRestoreCopyingParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobRestoreCopyingParam) ProtoMessage() {}
func (x *JobRestoreCopyingParam) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobRestoreCopyingParam.ProtoReflect.Descriptor instead.
func (*JobRestoreCopyingParam) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{3}
}
func (x *JobRestoreCopyingParam) GetDevice() string {
if x != nil {
return x.Device
}
return ""
}
type JobRestoreFinishedParam struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *JobRestoreFinishedParam) Reset() {
*x = JobRestoreFinishedParam{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobRestoreFinishedParam) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobRestoreFinishedParam) ProtoMessage() {}
func (x *JobRestoreFinishedParam) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobRestoreFinishedParam.ProtoReflect.Descriptor instead.
func (*JobRestoreFinishedParam) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{4}
}
type FileRestoreState struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
FileId int64 `protobuf:"varint,1,opt,name=file_id,json=fileId,proto3" json:"file_id,omitempty"`
Status CopyStatus `protobuf:"varint,2,opt,name=status,proto3,enum=copy_status.CopyStatus" json:"status,omitempty"`
TapeId int64 `protobuf:"varint,17,opt,name=tape_id,json=tapeId,proto3" json:"tape_id,omitempty"`
PositionId int64 `protobuf:"varint,18,opt,name=position_id,json=positionId,proto3" json:"position_id,omitempty"`
PathInTape []string `protobuf:"bytes,19,rep,name=path_in_tape,json=pathInTape,proto3" json:"path_in_tape,omitempty"`
}
func (x *FileRestoreState) Reset() {
*x = FileRestoreState{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *FileRestoreState) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*FileRestoreState) ProtoMessage() {}
func (x *FileRestoreState) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use FileRestoreState.ProtoReflect.Descriptor instead.
func (*FileRestoreState) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{5}
}
func (x *FileRestoreState) GetFileId() int64 {
if x != nil {
return x.FileId
}
return 0
}
func (x *FileRestoreState) GetStatus() CopyStatus {
if x != nil {
return x.Status
}
return CopyStatus_Draft
}
func (x *FileRestoreState) GetTapeId() int64 {
if x != nil {
return x.TapeId
}
return 0
}
func (x *FileRestoreState) GetPositionId() int64 {
if x != nil {
return x.PositionId
}
return 0
}
func (x *FileRestoreState) GetPathInTape() []string {
if x != nil {
return x.PathInTape
}
return nil
}
type JobStateRestore struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Step JobRestoreStep `protobuf:"varint,1,opt,name=step,proto3,enum=job_restore.JobRestoreStep" json:"step,omitempty"`
Files []*FileRestoreState `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"`
}
func (x *JobStateRestore) Reset() {
*x = JobStateRestore{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobStateRestore) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobStateRestore) ProtoMessage() {}
func (x *JobStateRestore) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobStateRestore.ProtoReflect.Descriptor instead.
func (*JobStateRestore) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{6}
}
func (x *JobStateRestore) GetStep() JobRestoreStep {
if x != nil {
return x.Step
}
return JobRestoreStep_Pending
}
func (x *JobStateRestore) GetFiles() []*FileRestoreState {
if x != nil {
return x.Files
}
return nil
}
type JobDisplayRestore struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
CopyedBytes int64 `protobuf:"varint,1,opt,name=copyedBytes,proto3" json:"copyedBytes,omitempty"`
CopyedFiles int64 `protobuf:"varint,2,opt,name=copyedFiles,proto3" json:"copyedFiles,omitempty"`
TotalBytes int64 `protobuf:"varint,3,opt,name=totalBytes,proto3" json:"totalBytes,omitempty"`
TotalFiles int64 `protobuf:"varint,4,opt,name=totalFiles,proto3" json:"totalFiles,omitempty"`
Logs []byte `protobuf:"bytes,17,opt,name=logs,proto3" json:"logs,omitempty"`
}
func (x *JobDisplayRestore) Reset() {
*x = JobDisplayRestore{}
if protoimpl.UnsafeEnabled {
mi := &file_job_restore_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JobDisplayRestore) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JobDisplayRestore) ProtoMessage() {}
func (x *JobDisplayRestore) ProtoReflect() protoreflect.Message {
mi := &file_job_restore_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JobDisplayRestore.ProtoReflect.Descriptor instead.
func (*JobDisplayRestore) Descriptor() ([]byte, []int) {
return file_job_restore_proto_rawDescGZIP(), []int{7}
}
func (x *JobDisplayRestore) GetCopyedBytes() int64 {
if x != nil {
return x.CopyedBytes
}
return 0
}
func (x *JobDisplayRestore) GetCopyedFiles() int64 {
if x != nil {
return x.CopyedFiles
}
return 0
}
func (x *JobDisplayRestore) GetTotalBytes() int64 {
if x != nil {
return x.TotalBytes
}
return 0
}
func (x *JobDisplayRestore) GetTotalFiles() int64 {
if x != nil {
return x.TotalFiles
}
return 0
}
func (x *JobDisplayRestore) GetLogs() []byte {
if x != nil {
return x.Logs
}
return nil
}
var File_job_restore_proto protoreflect.FileDescriptor
var file_job_restore_proto_rawDesc = []byte{
0x0a, 0x11, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x1a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x22, 0x2c, 0x0a, 0x0f, 0x4a, 0x6f, 0x62, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x52,
0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69,
0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64,
0x73, 0x22, 0xf1, 0x01, 0x0a, 0x13, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x4e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x4b, 0x0a, 0x0b, 0x57, 0x61, 0x69,
0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27,
0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62,
0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61,
0x70, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46,
0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x12, 0x3f, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e,
0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65,
0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x07,
0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x12, 0x43, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73,
0x68, 0x65, 0x64, 0x18, 0xff, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6a, 0x6f, 0x62,
0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74,
0x6f, 0x72, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d,
0x48, 0x00, 0x52, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x07, 0x0a, 0x05,
0x70, 0x61, 0x72, 0x61, 0x6d, 0x22, 0x1c, 0x0a, 0x1a, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74,
0x6f, 0x72, 0x65, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65, 0x50, 0x61,
0x72, 0x61, 0x6d, 0x22, 0x30, 0x0a, 0x16, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72,
0x65, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x12, 0x16, 0x0a,
0x06, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64,
0x65, 0x76, 0x69, 0x63, 0x65, 0x22, 0x19, 0x0a, 0x17, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74,
0x6f, 0x72, 0x65, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d,
0x22, 0xb8, 0x01, 0x0a, 0x10, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x64,
0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x2f,
0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17,
0x2e, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x43, 0x6f, 0x70,
0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12,
0x17, 0x0a, 0x07, 0x74, 0x61, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03,
0x52, 0x06, 0x74, 0x61, 0x70, 0x65, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x6f, 0x73, 0x69,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x70,
0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x20, 0x0a, 0x0c, 0x70, 0x61, 0x74,
0x68, 0x5f, 0x69, 0x6e, 0x5f, 0x74, 0x61, 0x70, 0x65, 0x18, 0x13, 0x20, 0x03, 0x28, 0x09, 0x52,
0x0a, 0x70, 0x61, 0x74, 0x68, 0x49, 0x6e, 0x54, 0x61, 0x70, 0x65, 0x22, 0x77, 0x0a, 0x0f, 0x4a,
0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x2f,
0x0a, 0x04, 0x73, 0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x6a,
0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x65,
0x73, 0x74, 0x6f, 0x72, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12,
0x33, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d,
0x2e, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x69, 0x6c,
0x65, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x66,
0x69, 0x6c, 0x65, 0x73, 0x22, 0xab, 0x01, 0x0a, 0x11, 0x4a, 0x6f, 0x62, 0x44, 0x69, 0x73, 0x70,
0x6c, 0x61, 0x79, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f,
0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52,
0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b,
0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28,
0x03, 0x52, 0x0b, 0x63, 0x6f, 0x70, 0x79, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x1e,
0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, 0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01,
0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x1e,
0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01,
0x28, 0x03, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x12,
0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6c, 0x6f,
0x67, 0x73, 0x2a, 0x4a, 0x0a, 0x0e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x53, 0x74, 0x65, 0x70, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x10,
0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x46, 0x6f, 0x72, 0x54, 0x61, 0x70, 0x65,
0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x6f, 0x70, 0x79, 0x69, 0x6e, 0x67, 0x10, 0x02, 0x12,
0x0d, 0x0a, 0x08, 0x46, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x10, 0xff, 0x01, 0x42, 0x28,
0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63,
0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65,
0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_job_restore_proto_rawDescOnce sync.Once
file_job_restore_proto_rawDescData = file_job_restore_proto_rawDesc
)
func file_job_restore_proto_rawDescGZIP() []byte {
file_job_restore_proto_rawDescOnce.Do(func() {
file_job_restore_proto_rawDescData = protoimpl.X.CompressGZIP(file_job_restore_proto_rawDescData)
})
return file_job_restore_proto_rawDescData
}
var file_job_restore_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_job_restore_proto_msgTypes = make([]protoimpl.MessageInfo, 8)
var file_job_restore_proto_goTypes = []interface{}{
(JobRestoreStep)(0), // 0: job_restore.JobRestoreStep
(*JobParamRestore)(nil), // 1: job_restore.JobParamRestore
(*JobRestoreNextParam)(nil), // 2: job_restore.JobRestoreNextParam
(*JobRestoreWaitForTapeParam)(nil), // 3: job_restore.JobRestoreWaitForTapeParam
(*JobRestoreCopyingParam)(nil), // 4: job_restore.JobRestoreCopyingParam
(*JobRestoreFinishedParam)(nil), // 5: job_restore.JobRestoreFinishedParam
(*FileRestoreState)(nil), // 6: job_restore.FileRestoreState
(*JobStateRestore)(nil), // 7: job_restore.JobStateRestore
(*JobDisplayRestore)(nil), // 8: job_restore.JobDisplayRestore
(CopyStatus)(0), // 9: copy_status.CopyStatus
}
var file_job_restore_proto_depIdxs = []int32{
3, // 0: job_restore.JobRestoreNextParam.WaitForTape:type_name -> job_restore.JobRestoreWaitForTapeParam
4, // 1: job_restore.JobRestoreNextParam.Copying:type_name -> job_restore.JobRestoreCopyingParam
5, // 2: job_restore.JobRestoreNextParam.Finished:type_name -> job_restore.JobRestoreFinishedParam
9, // 3: job_restore.FileRestoreState.status:type_name -> copy_status.CopyStatus
0, // 4: job_restore.JobStateRestore.step:type_name -> job_restore.JobRestoreStep
6, // 5: job_restore.JobStateRestore.files:type_name -> job_restore.FileRestoreState
6, // [6:6] is the sub-list for method output_type
6, // [6:6] is the sub-list for method input_type
6, // [6:6] is the sub-list for extension type_name
6, // [6:6] is the sub-list for extension extendee
0, // [0:6] is the sub-list for field type_name
}
func init() { file_job_restore_proto_init() }
func file_job_restore_proto_init() {
if File_job_restore_proto != nil {
return
}
file_copy_status_proto_init()
if !protoimpl.UnsafeEnabled {
file_job_restore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobParamRestore); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobRestoreNextParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobRestoreWaitForTapeParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobRestoreCopyingParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobRestoreFinishedParam); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*FileRestoreState); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobStateRestore); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_job_restore_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JobDisplayRestore); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_job_restore_proto_msgTypes[1].OneofWrappers = []interface{}{
(*JobRestoreNextParam_WaitForTape)(nil),
(*JobRestoreNextParam_Copying)(nil),
(*JobRestoreNextParam_Finished)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_job_restore_proto_rawDesc,
NumEnums: 1,
NumMessages: 8,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_job_restore_proto_goTypes,
DependencyIndexes: file_job_restore_proto_depIdxs,
EnumInfos: file_job_restore_proto_enumTypes,
MessageInfos: file_job_restore_proto_msgTypes,
}.Build()
File_job_restore_proto = out.File
file_job_restore_proto_rawDesc = nil
file_job_restore_proto_goTypes = nil
file_job_restore_proto_depIdxs = nil
}

56
entity/job_restore.proto Normal file
View File

@@ -0,0 +1,56 @@
syntax = "proto3";
package job_restore;
option go_package = "github.com/abc950309/tapewriter/entity";
import "copy_status.proto";
enum JobRestoreStep {
Pending = 0;
WaitForTape = 1;
Copying = 2;
Finished = 255;
}
message JobParamRestore {
repeated int64 file_ids = 1;
}
message JobRestoreNextParam {
oneof param {
JobRestoreWaitForTapeParam WaitForTape = 1;
JobRestoreCopyingParam Copying = 2;
JobRestoreFinishedParam Finished = 255;
}
}
message JobRestoreWaitForTapeParam {}
message JobRestoreCopyingParam {
string device = 1;
}
message JobRestoreFinishedParam {}
message FileRestoreState {
int64 file_id = 1;
copy_status.CopyStatus status = 2;
int64 tape_id = 17;
int64 position_id = 18;
repeated string path_in_tape = 19;
}
message JobStateRestore {
JobRestoreStep step = 1;
repeated FileRestoreState files = 2;
}
message JobDisplayRestore {
int64 copyedBytes = 1;
int64 copyedFiles = 2;
int64 totalBytes = 3;
int64 totalFiles = 4;
bytes logs = 17;
}

219
entity/position.pb.go Normal file
View File

@@ -0,0 +1,219 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: position.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Position struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
FileId int64 `protobuf:"varint,2,opt,name=file_id,json=fileId,proto3" json:"file_id,omitempty"`
TapeId int64 `protobuf:"varint,3,opt,name=tape_id,json=tapeId,proto3" json:"tape_id,omitempty"`
Path string `protobuf:"bytes,4,opt,name=path,proto3" json:"path,omitempty"`
Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"`
ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"`
WriteTime int64 `protobuf:"varint,19,opt,name=write_time,json=writeTime,proto3" json:"write_time,omitempty"`
Size int64 `protobuf:"varint,20,opt,name=size,proto3" json:"size,omitempty"`
Hash []byte `protobuf:"bytes,21,opt,name=hash,proto3" json:"hash,omitempty"`
}
func (x *Position) Reset() {
*x = Position{}
if protoimpl.UnsafeEnabled {
mi := &file_position_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Position) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Position) ProtoMessage() {}
func (x *Position) ProtoReflect() protoreflect.Message {
mi := &file_position_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Position.ProtoReflect.Descriptor instead.
func (*Position) Descriptor() ([]byte, []int) {
return file_position_proto_rawDescGZIP(), []int{0}
}
func (x *Position) GetId() int64 {
if x != nil {
return x.Id
}
return 0
}
func (x *Position) GetFileId() int64 {
if x != nil {
return x.FileId
}
return 0
}
func (x *Position) GetTapeId() int64 {
if x != nil {
return x.TapeId
}
return 0
}
func (x *Position) GetPath() string {
if x != nil {
return x.Path
}
return ""
}
func (x *Position) GetMode() int64 {
if x != nil {
return x.Mode
}
return 0
}
func (x *Position) GetModTime() int64 {
if x != nil {
return x.ModTime
}
return 0
}
func (x *Position) GetWriteTime() int64 {
if x != nil {
return x.WriteTime
}
return 0
}
func (x *Position) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *Position) GetHash() []byte {
if x != nil {
return x.Hash
}
return nil
}
var File_position_proto protoreflect.FileDescriptor
var file_position_proto_rawDesc = []byte{
0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x08, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xd6, 0x01, 0x0a, 0x08, 0x50,
0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20,
0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x5f,
0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x64,
0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28,
0x03, 0x52, 0x06, 0x74, 0x61, 0x70, 0x65, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74,
0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x6d, 0x6f, 0x64,
0x65, 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20,
0x01, 0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a,
0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03,
0x52, 0x09, 0x77, 0x72, 0x69, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73,
0x69, 0x7a, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12,
0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68,
0x61, 0x73, 0x68, 0x42, 0x28, 0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65,
0x77, 0x72, 0x69, 0x74, 0x65, 0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_position_proto_rawDescOnce sync.Once
file_position_proto_rawDescData = file_position_proto_rawDesc
)
func file_position_proto_rawDescGZIP() []byte {
file_position_proto_rawDescOnce.Do(func() {
file_position_proto_rawDescData = protoimpl.X.CompressGZIP(file_position_proto_rawDescData)
})
return file_position_proto_rawDescData
}
var file_position_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_position_proto_goTypes = []interface{}{
(*Position)(nil), // 0: position.Position
}
var file_position_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_position_proto_init() }
func file_position_proto_init() {
if File_position_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_position_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Position); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_position_proto_rawDesc,
NumEnums: 0,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_position_proto_goTypes,
DependencyIndexes: file_position_proto_depIdxs,
MessageInfos: file_position_proto_msgTypes,
}.Build()
File_position_proto = out.File
file_position_proto_rawDesc = nil
file_position_proto_goTypes = nil
file_position_proto_depIdxs = nil
}

16
entity/position.proto Normal file
View File

@@ -0,0 +1,16 @@
syntax = "proto3";
package position;
option go_package = "github.com/abc950309/tapewriter/entity";
message Position {
int64 id = 1;
int64 file_id = 2;
int64 tape_id = 3;
string path = 4;
int64 mode = 17;
int64 mod_time = 18;
int64 write_time = 19;
int64 size = 20;
bytes hash = 21;
}

2004
entity/service.pb.go Normal file

File diff suppressed because it is too large Load Diff

145
entity/service.proto Normal file
View File

@@ -0,0 +1,145 @@
syntax = "proto3";
package service;
option go_package = "github.com/abc950309/tapewriter/entity";
import "job.proto";
import "file.proto";
import "position.proto";
import "tape.proto";
import "source.proto";
service Service {
rpc FileGet(FileGetRequest) returns (FileGetReply) {}
rpc FileEdit(FileEditRequest) returns (FileEditReply) {}
rpc FileMkdir(FileMkdirRequest) returns (FileMkdirReply) {}
rpc FileDelete(FileDeleteRequest) returns (FileDeleteReply) {}
rpc FileListParents(FileListParentsRequest) returns (FileListParentsReply) {}
rpc TapeMGet(TapeMGetRequest) returns (TapeMGetReply) {}
rpc JobList(JobListRequest) returns (JobListReply) {}
rpc JobCreate(JobCreateRequest) returns (JobCreateReply) {}
rpc JobNext(JobNextRequest) returns (JobNextReply) {}
rpc JobDisplay(JobDisplayRequest) returns (JobDisplayReply) {}
rpc JobGetLog(JobGetLogRequest) returns (JobGetLogReply) {}
rpc SourceList(SourceListRequest) returns (SourceListReply) {}
rpc DeviceList(DeviceListRequest) returns (DeviceListReply) {}
}
message FileGetRequest {
int64 id = 1;
}
message FileGetReply {
optional file.File file = 1;
repeated position.Position positions = 2;
repeated file.File children = 17;
}
message FileEditRequest {
int64 id = 1;
file.EditedFile file = 2;
}
message FileEditReply {
file.File file = 1;
}
message FileMkdirRequest {
int64 parent_id = 1;
string path = 2;
}
message FileMkdirReply {
file.File file = 1;
}
message FileDeleteRequest {
repeated int64 ids = 1;
}
message FileDeleteReply {
}
message FileListParentsRequest {
int64 id = 1;
}
message FileListParentsReply {
repeated file.File parents = 1;
}
message TapeMGetRequest {
repeated int64 ids = 1;
}
message TapeMGetReply {
repeated tape.Tape tapes = 1;
}
message JobListRequest {
oneof param {
JobMGetRequest mget = 1;
job.JobFilter list = 2;
}
}
message JobMGetRequest {
repeated int64 ids = 1;
}
message JobListReply {
repeated job.Job jobs = 1;
}
message JobCreateRequest {
job.CreatableJob job = 1;
}
message JobCreateReply {
job.Job job = 1;
}
message JobNextRequest {
int64 id = 1;
job.JobNextParam param = 2;
}
message JobNextReply {
job.Job job = 1;
}
message JobDisplayRequest {
int64 id = 1;
}
message JobDisplayReply {
job.JobDisplay display = 1;
}
message JobGetLogRequest {
int64 job_id = 1;
optional int64 offset = 2;
}
message JobGetLogReply {
bytes logs = 1;
}
message SourceListRequest {
string path = 1;
}
message SourceListReply {
source.SourceFile file = 1;
repeated source.SourceFile chain = 2;
repeated source.SourceFile children = 17;
}
message DeviceListRequest {}
message DeviceListReply {
repeated string devices = 1;
}

16
entity/service_gen.sh Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -ex
CURDIR=$(cd $(dirname $0); pwd);
cd ${CURDIR};
SRC_DIR=${CURDIR};
GO_DST_DIR=${CURDIR};
TS_DST_DIR=${CURDIR}/../frontend/src/apis;
protoc --go_out=$GO_DST_DIR --go_opt=paths=source_relative \
--go-grpc_out=$GO_DST_DIR --go-grpc_opt=paths=source_relative \
-I=$SRC_DIR `ls *.proto`;
# --js_out=import_style=es6,binary:$TS_DST_DIR \
# --grpc-web_out=import_style=typescript,mode=grpcwebtext:$TS_DST_DIR \

537
entity/service_grpc.pb.go Normal file
View File

@@ -0,0 +1,537 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.10
// source: service.proto
package entity
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// ServiceClient is the client API for Service service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type ServiceClient interface {
FileGet(ctx context.Context, in *FileGetRequest, opts ...grpc.CallOption) (*FileGetReply, error)
FileEdit(ctx context.Context, in *FileEditRequest, opts ...grpc.CallOption) (*FileEditReply, error)
FileMkdir(ctx context.Context, in *FileMkdirRequest, opts ...grpc.CallOption) (*FileMkdirReply, error)
FileDelete(ctx context.Context, in *FileDeleteRequest, opts ...grpc.CallOption) (*FileDeleteReply, error)
FileListParents(ctx context.Context, in *FileListParentsRequest, opts ...grpc.CallOption) (*FileListParentsReply, error)
TapeMGet(ctx context.Context, in *TapeMGetRequest, opts ...grpc.CallOption) (*TapeMGetReply, error)
JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error)
JobCreate(ctx context.Context, in *JobCreateRequest, opts ...grpc.CallOption) (*JobCreateReply, error)
JobNext(ctx context.Context, in *JobNextRequest, opts ...grpc.CallOption) (*JobNextReply, error)
JobDisplay(ctx context.Context, in *JobDisplayRequest, opts ...grpc.CallOption) (*JobDisplayReply, error)
JobGetLog(ctx context.Context, in *JobGetLogRequest, opts ...grpc.CallOption) (*JobGetLogReply, error)
SourceList(ctx context.Context, in *SourceListRequest, opts ...grpc.CallOption) (*SourceListReply, error)
DeviceList(ctx context.Context, in *DeviceListRequest, opts ...grpc.CallOption) (*DeviceListReply, error)
}
type serviceClient struct {
cc grpc.ClientConnInterface
}
func NewServiceClient(cc grpc.ClientConnInterface) ServiceClient {
return &serviceClient{cc}
}
func (c *serviceClient) FileGet(ctx context.Context, in *FileGetRequest, opts ...grpc.CallOption) (*FileGetReply, error) {
out := new(FileGetReply)
err := c.cc.Invoke(ctx, "/service.Service/FileGet", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) FileEdit(ctx context.Context, in *FileEditRequest, opts ...grpc.CallOption) (*FileEditReply, error) {
out := new(FileEditReply)
err := c.cc.Invoke(ctx, "/service.Service/FileEdit", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) FileMkdir(ctx context.Context, in *FileMkdirRequest, opts ...grpc.CallOption) (*FileMkdirReply, error) {
out := new(FileMkdirReply)
err := c.cc.Invoke(ctx, "/service.Service/FileMkdir", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) FileDelete(ctx context.Context, in *FileDeleteRequest, opts ...grpc.CallOption) (*FileDeleteReply, error) {
out := new(FileDeleteReply)
err := c.cc.Invoke(ctx, "/service.Service/FileDelete", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) FileListParents(ctx context.Context, in *FileListParentsRequest, opts ...grpc.CallOption) (*FileListParentsReply, error) {
out := new(FileListParentsReply)
err := c.cc.Invoke(ctx, "/service.Service/FileListParents", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) TapeMGet(ctx context.Context, in *TapeMGetRequest, opts ...grpc.CallOption) (*TapeMGetReply, error) {
out := new(TapeMGetReply)
err := c.cc.Invoke(ctx, "/service.Service/TapeMGet", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error) {
out := new(JobListReply)
err := c.cc.Invoke(ctx, "/service.Service/JobList", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobCreate(ctx context.Context, in *JobCreateRequest, opts ...grpc.CallOption) (*JobCreateReply, error) {
out := new(JobCreateReply)
err := c.cc.Invoke(ctx, "/service.Service/JobCreate", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobNext(ctx context.Context, in *JobNextRequest, opts ...grpc.CallOption) (*JobNextReply, error) {
out := new(JobNextReply)
err := c.cc.Invoke(ctx, "/service.Service/JobNext", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobDisplay(ctx context.Context, in *JobDisplayRequest, opts ...grpc.CallOption) (*JobDisplayReply, error) {
out := new(JobDisplayReply)
err := c.cc.Invoke(ctx, "/service.Service/JobDisplay", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobGetLog(ctx context.Context, in *JobGetLogRequest, opts ...grpc.CallOption) (*JobGetLogReply, error) {
out := new(JobGetLogReply)
err := c.cc.Invoke(ctx, "/service.Service/JobGetLog", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) SourceList(ctx context.Context, in *SourceListRequest, opts ...grpc.CallOption) (*SourceListReply, error) {
out := new(SourceListReply)
err := c.cc.Invoke(ctx, "/service.Service/SourceList", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) DeviceList(ctx context.Context, in *DeviceListRequest, opts ...grpc.CallOption) (*DeviceListReply, error) {
out := new(DeviceListReply)
err := c.cc.Invoke(ctx, "/service.Service/DeviceList", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ServiceServer is the server API for Service service.
// All implementations must embed UnimplementedServiceServer
// for forward compatibility
type ServiceServer interface {
FileGet(context.Context, *FileGetRequest) (*FileGetReply, error)
FileEdit(context.Context, *FileEditRequest) (*FileEditReply, error)
FileMkdir(context.Context, *FileMkdirRequest) (*FileMkdirReply, error)
FileDelete(context.Context, *FileDeleteRequest) (*FileDeleteReply, error)
FileListParents(context.Context, *FileListParentsRequest) (*FileListParentsReply, error)
TapeMGet(context.Context, *TapeMGetRequest) (*TapeMGetReply, error)
JobList(context.Context, *JobListRequest) (*JobListReply, error)
JobCreate(context.Context, *JobCreateRequest) (*JobCreateReply, error)
JobNext(context.Context, *JobNextRequest) (*JobNextReply, error)
JobDisplay(context.Context, *JobDisplayRequest) (*JobDisplayReply, error)
JobGetLog(context.Context, *JobGetLogRequest) (*JobGetLogReply, error)
SourceList(context.Context, *SourceListRequest) (*SourceListReply, error)
DeviceList(context.Context, *DeviceListRequest) (*DeviceListReply, error)
mustEmbedUnimplementedServiceServer()
}
// UnimplementedServiceServer must be embedded to have forward compatible implementations.
type UnimplementedServiceServer struct {
}
func (UnimplementedServiceServer) FileGet(context.Context, *FileGetRequest) (*FileGetReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method FileGet not implemented")
}
func (UnimplementedServiceServer) FileEdit(context.Context, *FileEditRequest) (*FileEditReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method FileEdit not implemented")
}
func (UnimplementedServiceServer) FileMkdir(context.Context, *FileMkdirRequest) (*FileMkdirReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method FileMkdir not implemented")
}
func (UnimplementedServiceServer) FileDelete(context.Context, *FileDeleteRequest) (*FileDeleteReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method FileDelete not implemented")
}
func (UnimplementedServiceServer) FileListParents(context.Context, *FileListParentsRequest) (*FileListParentsReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method FileListParents not implemented")
}
func (UnimplementedServiceServer) TapeMGet(context.Context, *TapeMGetRequest) (*TapeMGetReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method TapeMGet not implemented")
}
func (UnimplementedServiceServer) JobList(context.Context, *JobListRequest) (*JobListReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobList not implemented")
}
func (UnimplementedServiceServer) JobCreate(context.Context, *JobCreateRequest) (*JobCreateReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobCreate not implemented")
}
func (UnimplementedServiceServer) JobNext(context.Context, *JobNextRequest) (*JobNextReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobNext not implemented")
}
func (UnimplementedServiceServer) JobDisplay(context.Context, *JobDisplayRequest) (*JobDisplayReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobDisplay not implemented")
}
func (UnimplementedServiceServer) JobGetLog(context.Context, *JobGetLogRequest) (*JobGetLogReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobGetLog not implemented")
}
func (UnimplementedServiceServer) SourceList(context.Context, *SourceListRequest) (*SourceListReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method SourceList not implemented")
}
func (UnimplementedServiceServer) DeviceList(context.Context, *DeviceListRequest) (*DeviceListReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeviceList not implemented")
}
func (UnimplementedServiceServer) mustEmbedUnimplementedServiceServer() {}
// UnsafeServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ServiceServer will
// result in compilation errors.
type UnsafeServiceServer interface {
mustEmbedUnimplementedServiceServer()
}
func RegisterServiceServer(s grpc.ServiceRegistrar, srv ServiceServer) {
s.RegisterService(&Service_ServiceDesc, srv)
}
func _Service_FileGet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FileGetRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).FileGet(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/FileGet",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).FileGet(ctx, req.(*FileGetRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_FileEdit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FileEditRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).FileEdit(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/FileEdit",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).FileEdit(ctx, req.(*FileEditRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_FileMkdir_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FileMkdirRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).FileMkdir(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/FileMkdir",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).FileMkdir(ctx, req.(*FileMkdirRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_FileDelete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FileDeleteRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).FileDelete(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/FileDelete",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).FileDelete(ctx, req.(*FileDeleteRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_FileListParents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FileListParentsRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).FileListParents(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/FileListParents",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).FileListParents(ctx, req.(*FileListParentsRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_TapeMGet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(TapeMGetRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).TapeMGet(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/TapeMGet",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).TapeMGet(ctx, req.(*TapeMGetRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobListRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).JobList(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/JobList",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).JobList(ctx, req.(*JobListRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobCreate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobCreateRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).JobCreate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/JobCreate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).JobCreate(ctx, req.(*JobCreateRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobNext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobNextRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).JobNext(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/JobNext",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).JobNext(ctx, req.(*JobNextRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobDisplay_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobDisplayRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).JobDisplay(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/JobDisplay",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).JobDisplay(ctx, req.(*JobDisplayRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobGetLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobGetLogRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).JobGetLog(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/JobGetLog",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).JobGetLog(ctx, req.(*JobGetLogRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_SourceList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SourceListRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).SourceList(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/SourceList",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).SourceList(ctx, req.(*SourceListRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_DeviceList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(DeviceListRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).DeviceList(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/DeviceList",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).DeviceList(ctx, req.(*DeviceListRequest))
}
return interceptor(ctx, in, info, handler)
}
// Service_ServiceDesc is the grpc.ServiceDesc for Service service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Service_ServiceDesc = grpc.ServiceDesc{
ServiceName: "service.Service",
HandlerType: (*ServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "FileGet",
Handler: _Service_FileGet_Handler,
},
{
MethodName: "FileEdit",
Handler: _Service_FileEdit_Handler,
},
{
MethodName: "FileMkdir",
Handler: _Service_FileMkdir_Handler,
},
{
MethodName: "FileDelete",
Handler: _Service_FileDelete_Handler,
},
{
MethodName: "FileListParents",
Handler: _Service_FileListParents_Handler,
},
{
MethodName: "TapeMGet",
Handler: _Service_TapeMGet_Handler,
},
{
MethodName: "JobList",
Handler: _Service_JobList_Handler,
},
{
MethodName: "JobCreate",
Handler: _Service_JobCreate_Handler,
},
{
MethodName: "JobNext",
Handler: _Service_JobNext_Handler,
},
{
MethodName: "JobDisplay",
Handler: _Service_JobDisplay_Handler,
},
{
MethodName: "JobGetLog",
Handler: _Service_JobGetLog_Handler,
},
{
MethodName: "SourceList",
Handler: _Service_SourceList_Handler,
},
{
MethodName: "DeviceList",
Handler: _Service_DeviceList_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "service.proto",
}

72
entity/source.go Normal file
View File

@@ -0,0 +1,72 @@
package entity
import (
"path"
"github.com/abc950309/acp"
)
// NewSourceFromACPJob builds a Source from an acp copy job, carrying over the
// job's base directory and its relative path segments unchanged.
func NewSourceFromACPJob(job *acp.Job) *Source {
	src := new(Source)
	src.Base = job.Base
	src.Path = job.Path
	return src
}
// RealPath returns the on-disk location of the source: the base directory
// concatenated with the slash-joined path segments.
//
// NOTE(review): there is no separator inserted between Base and the joined
// path — this assumes Base already ends with "/"; confirm with callers.
func (x *Source) RealPath() string {
	joined := path.Join(x.Path...)
	return x.Base + joined
}
// Append returns a new Source whose path is x.Path followed by more. The
// receiver is left untouched: the result's path slice is freshly allocated,
// so it does not alias x.Path.
func (x *Source) Append(more ...string) *Source {
	combined := make([]string, 0, len(x.Path)+len(more))
	combined = append(combined, x.Path...)
	combined = append(combined, more...)
	return &Source{Base: x.Base, Path: combined}
}
// Compare orders two sources: element-wise over Path, then by path length
// (shorter first), then by Base. It returns -1 if x sorts before xx, 1 if it
// sorts after, and 0 if the two are identical.
func (x *Source) Compare(xx *Source) int {
	// bugfix: lb previously read len(x.Path), so differing lengths were never
	// detected and the loop below could index past the end of xx.Path.
	la, lb := len(x.Path), len(xx.Path)
	l := la
	if lb < la {
		l = lb
	}
	for idx := 0; idx < l; idx++ {
		if x.Path[idx] < xx.Path[idx] {
			return -1
		}
		if x.Path[idx] > xx.Path[idx] {
			return 1
		}
	}
	if la < lb {
		return -1
	}
	if la > lb {
		return 1
	}
	if x.Base < xx.Base {
		return -1
	}
	if x.Base > xx.Base {
		// bugfix: previously returned -1 here as well, which broke
		// antisymmetry (both Compare(a,b) and Compare(b,a) returned -1
		// for sources differing only in Base).
		return 1
	}
	return 0
}
// Equal reports whether two sources refer to the same location: same Base and
// identical Path segments. It agrees with Compare: Equal(xx) is true exactly
// when Compare(xx) == 0.
func (x *Source) Equal(xx *Source) bool {
	// bugfix: the length guard previously compared len(x.Path) against itself
	// (la, lb := len(x.Path), len(x.Path)), so it never fired and the loop
	// below could panic indexing past a shorter xx.Path.
	if len(x.Path) != len(xx.Path) {
		return false
	}
	for idx := range x.Path {
		if x.Path[idx] != xx.Path[idx] {
			return false
		}
	}
	// Consistency with Compare, which treats Base as part of identity; the
	// original ignored Base here, so Equal could be true while Compare != 0.
	return x.Base == xx.Base
}

362
entity/source.pb.go Normal file
View File

@@ -0,0 +1,362 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: source.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type SourceFile struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
ParentPath string `protobuf:"bytes,2,opt,name=parent_path,json=parentPath,proto3" json:"parent_path,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
Mode int64 `protobuf:"varint,17,opt,name=mode,proto3" json:"mode,omitempty"`
ModTime int64 `protobuf:"varint,18,opt,name=mod_time,json=modTime,proto3" json:"mod_time,omitempty"`
Size int64 `protobuf:"varint,19,opt,name=size,proto3" json:"size,omitempty"`
}
func (x *SourceFile) Reset() {
*x = SourceFile{}
if protoimpl.UnsafeEnabled {
mi := &file_source_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *SourceFile) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*SourceFile) ProtoMessage() {}
func (x *SourceFile) ProtoReflect() protoreflect.Message {
mi := &file_source_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use SourceFile.ProtoReflect.Descriptor instead.
func (*SourceFile) Descriptor() ([]byte, []int) {
return file_source_proto_rawDescGZIP(), []int{0}
}
func (x *SourceFile) GetPath() string {
if x != nil {
return x.Path
}
return ""
}
func (x *SourceFile) GetParentPath() string {
if x != nil {
return x.ParentPath
}
return ""
}
func (x *SourceFile) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *SourceFile) GetMode() int64 {
if x != nil {
return x.Mode
}
return 0
}
func (x *SourceFile) GetModTime() int64 {
if x != nil {
return x.ModTime
}
return 0
}
func (x *SourceFile) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
type Source struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Base string `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
Path []string `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"`
}
func (x *Source) Reset() {
*x = Source{}
if protoimpl.UnsafeEnabled {
mi := &file_source_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Source) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Source) ProtoMessage() {}
func (x *Source) ProtoReflect() protoreflect.Message {
mi := &file_source_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Source.ProtoReflect.Descriptor instead.
func (*Source) Descriptor() ([]byte, []int) {
return file_source_proto_rawDescGZIP(), []int{1}
}
func (x *Source) GetBase() string {
if x != nil {
return x.Base
}
return ""
}
func (x *Source) GetPath() []string {
if x != nil {
return x.Path
}
return nil
}
type SourceState struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Source *Source `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"`
Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
Status CopyStatus `protobuf:"varint,3,opt,name=status,proto3,enum=copy_status.CopyStatus" json:"status,omitempty"`
Message *string `protobuf:"bytes,4,opt,name=message,proto3,oneof" json:"message,omitempty"`
}
func (x *SourceState) Reset() {
*x = SourceState{}
if protoimpl.UnsafeEnabled {
mi := &file_source_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *SourceState) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*SourceState) ProtoMessage() {}
func (x *SourceState) ProtoReflect() protoreflect.Message {
mi := &file_source_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use SourceState.ProtoReflect.Descriptor instead.
func (*SourceState) Descriptor() ([]byte, []int) {
return file_source_proto_rawDescGZIP(), []int{2}
}
func (x *SourceState) GetSource() *Source {
if x != nil {
return x.Source
}
return nil
}
func (x *SourceState) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *SourceState) GetStatus() CopyStatus {
if x != nil {
return x.Status
}
return CopyStatus_Draft
}
func (x *SourceState) GetMessage() string {
if x != nil && x.Message != nil {
return *x.Message
}
return ""
}
var File_source_proto protoreflect.FileDescriptor
var file_source_proto_rawDesc = []byte{
0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x11, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61,
0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x98, 0x01, 0x0a, 0x0a, 0x53, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b,
0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52,
0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x5f, 0x74, 0x69, 0x6d,
0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x54, 0x69, 0x6d, 0x65,
0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04,
0x73, 0x69, 0x7a, 0x65, 0x22, 0x30, 0x0a, 0x06, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12,
0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x62, 0x61,
0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09,
0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0xa5, 0x01, 0x0a, 0x0b, 0x53, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x26, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12,
0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69,
0x7a, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x17, 0x2e, 0x63, 0x6f, 0x70, 0x79, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73,
0x2e, 0x43, 0x6f, 0x70, 0x79, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61,
0x74, 0x75, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04,
0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88,
0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, 0x28,
0x5a, 0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63,
0x39, 0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65,
0x72, 0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_source_proto_rawDescOnce sync.Once
file_source_proto_rawDescData = file_source_proto_rawDesc
)
func file_source_proto_rawDescGZIP() []byte {
file_source_proto_rawDescOnce.Do(func() {
file_source_proto_rawDescData = protoimpl.X.CompressGZIP(file_source_proto_rawDescData)
})
return file_source_proto_rawDescData
}
var file_source_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_source_proto_goTypes = []interface{}{
(*SourceFile)(nil), // 0: source.SourceFile
(*Source)(nil), // 1: source.Source
(*SourceState)(nil), // 2: source.SourceState
(CopyStatus)(0), // 3: copy_status.CopyStatus
}
var file_source_proto_depIdxs = []int32{
1, // 0: source.SourceState.source:type_name -> source.Source
3, // 1: source.SourceState.status:type_name -> copy_status.CopyStatus
2, // [2:2] is the sub-list for method output_type
2, // [2:2] is the sub-list for method input_type
2, // [2:2] is the sub-list for extension type_name
2, // [2:2] is the sub-list for extension extendee
0, // [0:2] is the sub-list for field type_name
}
func init() { file_source_proto_init() }
func file_source_proto_init() {
if File_source_proto != nil {
return
}
file_copy_status_proto_init()
if !protoimpl.UnsafeEnabled {
file_source_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*SourceFile); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_source_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Source); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_source_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*SourceState); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_source_proto_msgTypes[2].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_source_proto_rawDesc,
NumEnums: 0,
NumMessages: 3,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_source_proto_goTypes,
DependencyIndexes: file_source_proto_depIdxs,
MessageInfos: file_source_proto_msgTypes,
}.Build()
File_source_proto = out.File
file_source_proto_rawDesc = nil
file_source_proto_goTypes = nil
file_source_proto_depIdxs = nil
}

28
entity/source.proto Normal file
View File

@@ -0,0 +1,28 @@
syntax = "proto3";
package source;
option go_package = "github.com/abc950309/tapewriter/entity";
import "copy_status.proto";
message SourceFile {
string path = 1;
string parent_path = 2;
string name = 3;
int64 mode = 17;
int64 mod_time = 18;
int64 size = 19;
}
message Source {
string base = 1;
repeated string path = 2;
}
message SourceState {
Source source = 1;
int64 size = 2;
copy_status.CopyStatus status = 3;
optional string message = 4;
}

214
entity/tape.pb.go Normal file
View File

@@ -0,0 +1,214 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.21.10
// source: tape.proto
package entity
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Tape struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
Barcode string `protobuf:"bytes,2,opt,name=barcode,proto3" json:"barcode,omitempty"`
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
Encryption string `protobuf:"bytes,4,opt,name=encryption,proto3" json:"encryption,omitempty"`
CreateTime int64 `protobuf:"varint,17,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
DestroyTime *int64 `protobuf:"varint,18,opt,name=destroy_time,json=destroyTime,proto3,oneof" json:"destroy_time,omitempty"`
CapacityBytes int64 `protobuf:"varint,19,opt,name=capacity_bytes,json=capacityBytes,proto3" json:"capacity_bytes,omitempty"`
WritenBytes int64 `protobuf:"varint,20,opt,name=writen_bytes,json=writenBytes,proto3" json:"writen_bytes,omitempty"`
}
func (x *Tape) Reset() {
*x = Tape{}
if protoimpl.UnsafeEnabled {
mi := &file_tape_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Tape) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Tape) ProtoMessage() {}
func (x *Tape) ProtoReflect() protoreflect.Message {
mi := &file_tape_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Tape.ProtoReflect.Descriptor instead.
func (*Tape) Descriptor() ([]byte, []int) {
return file_tape_proto_rawDescGZIP(), []int{0}
}
func (x *Tape) GetId() int64 {
if x != nil {
return x.Id
}
return 0
}
func (x *Tape) GetBarcode() string {
if x != nil {
return x.Barcode
}
return ""
}
func (x *Tape) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Tape) GetEncryption() string {
if x != nil {
return x.Encryption
}
return ""
}
func (x *Tape) GetCreateTime() int64 {
if x != nil {
return x.CreateTime
}
return 0
}
func (x *Tape) GetDestroyTime() int64 {
if x != nil && x.DestroyTime != nil {
return *x.DestroyTime
}
return 0
}
func (x *Tape) GetCapacityBytes() int64 {
if x != nil {
return x.CapacityBytes
}
return 0
}
func (x *Tape) GetWritenBytes() int64 {
if x != nil {
return x.WritenBytes
}
return 0
}
var File_tape_proto protoreflect.FileDescriptor
var file_tape_proto_rawDesc = []byte{
0x0a, 0x0a, 0x74, 0x61, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x74, 0x61,
0x70, 0x65, 0x22, 0x88, 0x02, 0x0a, 0x04, 0x54, 0x61, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x62,
0x61, 0x72, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x61,
0x72, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x6e, 0x63,
0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65,
0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x72, 0x65,
0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a,
0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0c, 0x64, 0x65,
0x73, 0x74, 0x72, 0x6f, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03,
0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x88,
0x01, 0x01, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x5f, 0x62,
0x79, 0x74, 0x65, 0x73, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x63, 0x61, 0x70, 0x61,
0x63, 0x69, 0x74, 0x79, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x72, 0x69,
0x74, 0x65, 0x6e, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x03, 0x52,
0x0b, 0x77, 0x72, 0x69, 0x74, 0x65, 0x6e, 0x42, 0x79, 0x74, 0x65, 0x73, 0x42, 0x0f, 0x0a, 0x0d,
0x5f, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x42, 0x28, 0x5a,
0x26, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x62, 0x63, 0x39,
0x35, 0x30, 0x33, 0x30, 0x39, 0x2f, 0x74, 0x61, 0x70, 0x65, 0x77, 0x72, 0x69, 0x74, 0x65, 0x72,
0x2f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
// Generated descriptor plumbing: lazily gzip-compresses the raw descriptor
// and registers the Tape message type with the protobuf runtime at init.
var (
	file_tape_proto_rawDescOnce sync.Once
	file_tape_proto_rawDescData = file_tape_proto_rawDesc
)

// file_tape_proto_rawDescGZIP compresses the raw descriptor exactly once and
// returns the cached compressed bytes (used by the deprecated Descriptor API).
func file_tape_proto_rawDescGZIP() []byte {
	file_tape_proto_rawDescOnce.Do(func() {
		file_tape_proto_rawDescData = protoimpl.X.CompressGZIP(file_tape_proto_rawDescData)
	})
	return file_tape_proto_rawDescData
}

var file_tape_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_tape_proto_goTypes = []interface{}{
	(*Tape)(nil), // 0: tape.Tape
}
var file_tape_proto_depIdxs = []int32{
	0, // [0:0] is the sub-list for method output_type
	0, // [0:0] is the sub-list for method input_type
	0, // [0:0] is the sub-list for extension type_name
	0, // [0:0] is the sub-list for extension extendee
	0, // [0:0] is the sub-list for field type_name
}

func init() { file_tape_proto_init() }

// file_tape_proto_init builds and registers the file's type descriptors; it
// is idempotent (guarded by the File_tape_proto nil check).
func file_tape_proto_init() {
	if File_tape_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		file_tape_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Tape); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	file_tape_proto_msgTypes[0].OneofWrappers = []interface{}{}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_tape_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   1,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_tape_proto_goTypes,
		DependencyIndexes: file_tape_proto_depIdxs,
		MessageInfos:      file_tape_proto_msgTypes,
	}.Build()
	File_tape_proto = out.File
	file_tape_proto_rawDesc = nil
	file_tape_proto_goTypes = nil
	file_tape_proto_depIdxs = nil
}

15
entity/tape.proto Normal file
View File

@@ -0,0 +1,15 @@
syntax = "proto3";
package tape;
option go_package = "github.com/abc950309/tapewriter/entity";
message Tape {
int64 id = 1;
string barcode = 2;
string name = 3;
string encryption = 4;
int64 create_time = 17;
optional int64 destroy_time = 18;
int64 capacity_bytes = 19;
int64 writen_bytes = 20;
}

73
entity/utils.go Normal file
View File

@@ -0,0 +1,73 @@
package entity
import (
"database/sql/driver"
"fmt"
reflect "reflect"
sync "sync"
"github.com/modern-go/reflect2"
"google.golang.org/protobuf/proto"
)
var (
typeMap sync.Map
)
// Scan implements database/sql.Scanner: it decodes a protobuf-serialized
// column value (string or []byte) into dst. A nil or empty source leaves dst
// as its freshly-reset zero value.
func Scan(dst proto.Message, src interface{}) error {
	// Cache the element type keyed by the runtime type of dst so repeated
	// scans of the same message type skip the reflect.TypeOf work.
	cacheKey := reflect2.RTypeOf(dst)
	typ, has := loadType(cacheKey)
	if !has {
		ptrType := reflect.TypeOf(dst)
		if ptrType.Kind() != reflect.Ptr {
			return fmt.Errorf("scan dst is not a ptr, has= %T", dst)
		}
		typ = reflect2.Type2(ptrType.Elem())
		storeType(cacheKey, typ)
	}

	// Reset dst before unmarshaling so stale fields never survive a scan.
	typ.Set(dst, typ.New())

	var buf []byte
	switch v := src.(type) {
	case string:
		buf = []byte(v)
	case []byte:
		buf = v
	case nil:
		return nil
	default:
		// FIX: previous message referred to "i18n" — a copy-paste artifact.
		return fmt.Errorf("proto scanner, unexpected source type, %T", v)
	}
	if len(buf) == 0 {
		return nil
	}

	// FIX: this is protobuf decoding, not JSON; report it as such.
	if err := proto.Unmarshal(buf, dst); err != nil {
		return fmt.Errorf("proto scanner, proto unmarshal fail, %w", err)
	}
	return nil
}
// Value implements database/sql/driver.Valuer: it serializes src with
// proto.Marshal so the message can be stored in a BLOB/TEXT column.
func Value(src proto.Message) (driver.Value, error) {
	buf, err := proto.Marshal(src)
	if err != nil {
		// FIX: this is protobuf encoding, not JSON; report it as such.
		return nil, fmt.Errorf("proto valuer, proto marshal fail, %w", err)
	}
	return buf, nil
}
// loadType looks up the cached reflect2 element type for the given rtype key.
func loadType(key uintptr) (reflect2.Type, bool) {
	v, ok := typeMap.Load(key)
	if !ok {
		return nil, false
	}
	return v.(reflect2.Type), true
}
// storeType caches the reflect2 element type under the given rtype key.
func storeType(key uintptr, typ reflect2.Type) {
	typeMap.Store(key, typ)
}

131
executor/executor.go Normal file
View File

@@ -0,0 +1,131 @@
package executor
import (
"context"
"fmt"
"sort"
"sync"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/library"
mapset "github.com/deckarep/golang-set/v2"
"gorm.io/gorm"
)
type Executor struct {
db *gorm.DB
lib *library.Library
devices []string
devicesLock sync.Mutex
availableDevices mapset.Set[string]
workDirectory string
encryptScript string
mkfsScript string
mountScript string
umountScript string
}
// New builds an Executor over the given database, library, tape devices, and
// helper scripts. workDirectory is the root for per-job artifacts such as
// job logs.
func New(
	db *gorm.DB, lib *library.Library,
	devices []string, workDirectory string,
	encryptScript, mkfsScript, mountScript, umountScript string,
) *Executor {
	return &Executor{
		db:  db,
		lib: lib,

		devices:          devices,
		availableDevices: mapset.NewThreadUnsafeSet(devices...),

		// BUG FIX: workDirectory was accepted but never assigned, so logPath
		// resolved "job-logs" relative to the process working directory.
		workDirectory: workDirectory,

		encryptScript: encryptScript,
		mkfsScript:    mkfsScript,
		mountScript:   mountScript,
		umountScript:  umountScript,
	}
}
// AutoMigrate creates/updates the executor's database tables (currently only
// the jobs table) via gorm schema migration.
func (e *Executor) AutoMigrate() error {
	return e.db.AutoMigrate(ModelJob)
}
// ListAvailableDevices returns a sorted snapshot of the tape devices that are
// not currently occupied by a running job.
func (e *Executor) ListAvailableDevices() []string {
	e.devicesLock.Lock()
	defer e.devicesLock.Unlock()

	devices := e.availableDevices.ToSlice()
	// Idiom: sort.Strings replaces the hand-rolled sort.Slice comparator.
	sort.Strings(devices)
	return devices
}
// occupyDevice atomically claims dev for exclusive use. It reports whether
// the claim succeeded; false means the device is already occupied.
func (e *Executor) occupyDevice(dev string) bool {
	e.devicesLock.Lock()
	defer e.devicesLock.Unlock()

	if e.availableDevices.Contains(dev) {
		e.availableDevices.Remove(dev)
		return true
	}
	return false
}
// releaseDevice returns dev to the pool of available devices.
func (e *Executor) releaseDevice(dev string) {
	e.devicesLock.Lock()
	e.availableDevices.Add(dev)
	e.devicesLock.Unlock()
}
// Start marks the job as processing, persists it, then dispatches on the
// job's state oneof. Only archive jobs are currently supported.
func (e *Executor) Start(ctx context.Context, job *Job) error {
	job.Status = entity.JobStatus_Processing
	if _, err := e.SaveJob(ctx, job); err != nil {
		return err
	}
	if state := job.State.GetArchive(); state != nil {
		if err := e.startArchive(ctx, job); err != nil {
			return err
		}
		return nil
	}
	// Unknown/unsupported state variant.
	return fmt.Errorf("unexpected state type, %T", job.State.State)
}
// Submit feeds a "next step" parameter into a processing job, creating (or
// reusing) the in-memory executor for the job's state type.
func (e *Executor) Submit(ctx context.Context, job *Job, param *entity.JobNextParam) error {
	if job.Status != entity.JobStatus_Processing {
		return fmt.Errorf("target job is not on processing, status= %s", job.Status)
	}
	if state := job.State.GetArchive(); state != nil {
		exe, err := e.newArchiveExecutor(ctx, job)
		if err != nil {
			return err
		}
		// submit is asynchronous; errors are logged by the executor itself.
		exe.submit(param.GetArchive())
		return nil
	}
	return fmt.Errorf("unexpected state type, %T", job.State.State)
}
// Display returns a progress snapshot for a processing job, wrapped in the
// JobDisplay oneof matching the job's state type.
func (e *Executor) Display(ctx context.Context, job *Job) (*entity.JobDisplay, error) {
	if job.Status != entity.JobStatus_Processing {
		return nil, fmt.Errorf("target job is not on processing, status= %s", job.Status)
	}
	if state := job.State.GetArchive(); state != nil {
		display, err := e.getArchiveDisplay(ctx, job)
		if err != nil {
			return nil, err
		}
		return &entity.JobDisplay{Display: &entity.JobDisplay_Archive{Archive: display}}, nil
	}
	return nil, fmt.Errorf("unexpected state type, %T", job.State.State)
}

132
executor/job.go Normal file
View File

@@ -0,0 +1,132 @@
package executor
import (
"context"
"fmt"
"time"
"github.com/abc950309/tapewriter/entity"
"gorm.io/gorm"
)
var (
	// ModelJob is the zero-value model handed to gorm for auto-migration.
	ModelJob = &Job{}
	// ErrJobNotFound is returned by GetJob when no row matches the ID.
	ErrJobNotFound = fmt.Errorf("get job: job not found")
)

// Job is one persisted unit of work (e.g. an archive run). State carries the
// serialized per-type state machine; Priority orders the pending queue.
type Job struct {
	ID int64 `gorm:"primaryKey;autoIncrement"`
	Status entity.JobStatus
	Priority int64
	State *entity.JobState
	CreateTime time.Time
	UpdateTime time.Time
}
// BeforeUpdate is a gorm hook that stamps UpdateTime on every save and
// backfills CreateTime for rows that have never been persisted.
func (j *Job) BeforeUpdate(tx *gorm.DB) error {
	now := time.Now()
	j.UpdateTime = now
	if j.CreateTime.IsZero() {
		j.CreateTime = now
	}
	return nil
}
// initJob initializes job from param, dispatching on the concrete param type.
func (e *Executor) initJob(ctx context.Context, job *Job, param *entity.JobParam) error {
	p := param.GetArchive()
	if p == nil {
		return fmt.Errorf("unexpected param type, %T", param.Param)
	}
	return e.initArchive(ctx, job, p)
}
// CreateJob initializes a job from param and inserts it as a new row.
func (e *Executor) CreateJob(ctx context.Context, job *Job, param *entity.JobParam) (*Job, error) {
	if err := e.initJob(ctx, job, param); err != nil {
		return nil, err
	}
	if err := e.db.WithContext(ctx).Create(job).Error; err != nil {
		return nil, fmt.Errorf("save job fail, err= %w", err)
	}
	return job, nil
}
// SaveJob persists the job (insert-or-update by primary key).
func (e *Executor) SaveJob(ctx context.Context, job *Job) (*Job, error) {
	if err := e.db.WithContext(ctx).Save(job).Error; err != nil {
		return nil, fmt.Errorf("save job fail, err= %w", err)
	}
	return job, nil
}
// MGetJob fetches the given job IDs and returns them keyed by ID. IDs with
// no matching row are simply absent from the result map.
func (e *Executor) MGetJob(ctx context.Context, ids ...int64) (map[int64]*Job, error) {
	result := make(map[int64]*Job, len(ids))
	if len(ids) == 0 {
		return result, nil
	}

	var jobs []*Job
	if r := e.db.WithContext(ctx).Where("id IN (?)", ids).Find(&jobs); r.Error != nil {
		return nil, fmt.Errorf("list jobs fail, err= %w", r.Error)
	}
	for _, j := range jobs {
		result[j.ID] = j
	}
	return result, nil
}
// GetJob fetches a single job by ID, returning ErrJobNotFound when absent.
func (e *Executor) GetJob(ctx context.Context, id int64) (*Job, error) {
	jobs, err := e.MGetJob(ctx, id)
	if err != nil {
		return nil, err
	}
	if job := jobs[id]; job != nil {
		return job, nil
	}
	return nil, ErrJobNotFound
}
// func (e *Executor) getNextJob(ctx context.Context) (*Job, error) {
// job := new(Job)
// if r := e.db.WithContext(ctx).
// Where("status = ?", entity.JobStatus_Pending).
// Order("priority DESC, create_time ASC").
// Limit(1).First(job); r.Error != nil {
// if errors.Is(r.Error, gorm.ErrRecordNotFound) {
// return nil, nil
// }
// return nil, r.Error
// }
// return job, nil
// }
// ListJob returns jobs matching filter, newest first. Limit defaults to 20
// when the filter leaves it unset.
func (e *Executor) ListJob(ctx context.Context, filter *entity.JobFilter) ([]*Job, error) {
	db := e.db.WithContext(ctx)

	// BUG FIX: gorm chain methods (Where/Limit/Offset/Order) return a new
	// *gorm.DB; the original code discarded the return values, so the filter,
	// limit, offset, and ordering were not reliably applied to the query.
	if filter.Status != nil {
		db = db.Where("status = ?", *filter.Status)
	}
	if filter.Limit != nil {
		db = db.Limit(int(*filter.Limit))
	} else {
		db = db.Limit(20)
	}
	if filter.Offset != nil {
		db = db.Offset(int(*filter.Offset))
	}
	db = db.Order("create_time DESC")

	jobs := make([]*Job, 0, 20)
	if r := db.Find(&jobs); r.Error != nil {
		return nil, fmt.Errorf("list jobs fail, err= %w", r.Error)
	}
	return jobs, nil
}

View File

@@ -0,0 +1,22 @@
package executor
import (
"context"
"sync/atomic"
"github.com/abc950309/tapewriter/entity"
)
// getArchiveDisplay snapshots the progress counters of the job's running
// archive executor into a display message. When no executor is running it
// returns a zero-valued display rather than an error.
func (e *Executor) getArchiveDisplay(ctx context.Context, job *Job) (*entity.JobDisplayArchive, error) {
	display := new(entity.JobDisplayArchive)
	if exe := e.getArchiveExecutor(ctx, job); exe != nil && exe.progress != nil {
		// Counters are written concurrently by the copy event handler; read
		// them atomically for a consistent-enough snapshot.
		display.CopyedBytes = atomic.LoadInt64(&exe.progress.bytes)
		display.CopyedFiles = atomic.LoadInt64(&exe.progress.files)
		display.TotalBytes = atomic.LoadInt64(&exe.progress.totalBytes)
		display.TotalFiles = atomic.LoadInt64(&exe.progress.totalFiles)
		display.Speed = atomic.LoadInt64(&exe.progress.speed)
	}
	return display, nil
}

357
executor/job_archive_exe.go Normal file
View File

@@ -0,0 +1,357 @@
package executor
import (
"context"
"encoding/hex"
"fmt"
"io"
"os"
"os/exec"
"path"
"sort"
"sync"
"sync/atomic"
"time"
"github.com/abc950309/acp"
"github.com/abc950309/tapewriter/entity"
"github.com/abc950309/tapewriter/library"
"github.com/abc950309/tapewriter/tools"
mapset "github.com/deckarep/golang-set/v2"
"github.com/sirupsen/logrus"
)
var (
runningArchives sync.Map
)
// getArchiveExecutor returns the registered in-memory executor for job, or
// nil when the job has no running archive executor.
func (e *Executor) getArchiveExecutor(ctx context.Context, job *Job) *jobArchiveExecutor {
	running, has := runningArchives.Load(job.ID)
	if !has {
		return nil
	}
	return running.(*jobArchiveExecutor)
}
// newArchiveExecutor returns the running archive executor for job, creating
// and registering a new one when none exists yet. Creation also opens the
// per-job log file and a logger that tees to stderr.
func (e *Executor) newArchiveExecutor(ctx context.Context, job *Job) (*jobArchiveExecutor, error) {
	if exe := e.getArchiveExecutor(ctx, job); exe != nil {
		return exe, nil
	}

	logFile, err := e.newLogWriter(job.ID)
	if err != nil {
		return nil, fmt.Errorf("get log writer fail, %w", err)
	}
	logger := logrus.New()
	logger.SetOutput(io.MultiWriter(os.Stderr, logFile))

	exe := &jobArchiveExecutor{
		ctx: context.Background(),
		exe: e,
		job: job,

		state:    job.State.GetArchive(),
		progress: new(progress),

		logFile: logFile,
		logger:  logger,
	}

	// BUG FIX: the old check-then-Store pattern raced — two concurrent calls
	// could each register an executor for the same job. LoadOrStore makes
	// registration atomic; the loser closes its log file and adopts the
	// winner's executor.
	if actual, loaded := runningArchives.LoadOrStore(job.ID, exe); loaded {
		logFile.Close()
		return actual.(*jobArchiveExecutor), nil
	}
	return exe, nil
}
// jobArchiveExecutor drives one archive job: it reacts to step parameters,
// runs the tape scripts, copies sources, and persists state transitions.
type jobArchiveExecutor struct {
	// ctx is a background context: the executor outlives any single request.
	ctx context.Context
	exe *Executor
	job *Job

	// stateLock guards state (and the job row writes derived from it).
	stateLock sync.Mutex
	state *entity.JobStateArchive

	// progress counters are updated atomically by the copy event handler.
	progress *progress

	logFile *os.File
	logger *logrus.Logger
}
// submit feeds a step parameter into the executor; handling errors are
// logged rather than returned, since callers fire these asynchronously.
func (a *jobArchiveExecutor) submit(param *entity.JobArchiveNextParam) {
	if err := a.handle(param); err != nil {
		// BUG FIX: the old call used the %w verb, which is only meaningful in
		// fmt.Errorf (logrus prints "%!w(...)"), and logged a failure at info
		// level. Attach the error and log at error level instead.
		a.logger.WithContext(a.ctx).WithError(err).Errorf("handle param fail")
	}
}
// handle runs one transition of the archive step machine:
//   Copying:     WaitForTape -> Copying, then write the tape asynchronously.
//   WaitForTape: Pending/Copying -> WaitForTape (operator must load a tape).
//   Finished:    Copying -> Finished, close the log and deregister.
// An unrecognized (or nil) param is silently ignored.
func (a *jobArchiveExecutor) handle(param *entity.JobArchiveNextParam) error {
	if p := param.GetCopying(); p != nil {
		if err := a.switchStep(entity.JobArchiveStep_Copying, entity.JobStatus_Processing, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_WaitForTape)); err != nil {
			return err
		}
		// Tape writing is long-running; run it off the caller's goroutine.
		go tools.Wrap(a.ctx, func() {
			_, err := a.makeTape(p.Device, p.Barcode, p.Name)
			if err != nil {
				// NOTE(review): "make type" looks like a typo for "make tape".
				a.logger.WithContext(a.ctx).WithError(err).Errorf("make type has error, barcode= '%s' name= '%s'", p.Barcode, p.Name)
			}
		})
		return nil
	}
	if p := param.GetWaitForTape(); p != nil {
		return a.switchStep(entity.JobArchiveStep_WaitForTape, entity.JobStatus_Processing, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_Pending, entity.JobArchiveStep_Copying))
	}
	if p := param.GetFinished(); p != nil {
		if err := a.switchStep(entity.JobArchiveStep_Finished, entity.JobStatus_Completed, mapset.NewThreadUnsafeSet(entity.JobArchiveStep_Copying)); err != nil {
			return err
		}
		a.logFile.Close()
		runningArchives.Delete(a.job.ID)
		return nil
	}
	return nil
}
// makeTape writes all not-yet-submitted sources onto one tape in the given
// device: it encrypts the tape with a fresh key, makes an LTFS filesystem,
// mounts it, copies the pending sources with acp, records the tape + files
// in the library, and marks the copied sources as submitted. On return (via
// the deferred makeTapeFinished) the job advances to WaitForTape or Finished
// depending on whether sources remain.
func (a *jobArchiveExecutor) makeTape(device, barcode, name string) (*library.Tape, error) {
	if !a.exe.occupyDevice(device) {
		return nil, fmt.Errorf("device is using, device= %s", device)
	}
	defer a.exe.releaseDevice(device)
	defer a.makeTapeFinished()

	// Fresh encryption key; the key file is removed shortly after use so it
	// never lingers on disk (the sleep lets the scripts finish reading it).
	encryption, keyPath, keyRecycle, err := a.exe.newKey()
	if err != nil {
		return nil, err
	}
	defer func() {
		time.Sleep(time.Second)
		keyRecycle()
	}()

	if err := runCmd(a.logger, a.exe.makeEncryptCmd(a.ctx, device, keyPath, barcode, name)); err != nil {
		return nil, fmt.Errorf("run encrypt script fail, %w", err)
	}

	// Format the tape (LTFS) via the external mkfs script.
	mkfsCmd := exec.CommandContext(a.ctx, a.exe.mkfsScript)
	mkfsCmd.Env = append(mkfsCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("TAPE_BARCODE=%s", barcode), fmt.Sprintf("TAPE_NAME=%s", name))
	if err := runCmd(a.logger, mkfsCmd); err != nil {
		return nil, fmt.Errorf("run mkfs script fail, %w", err)
	}

	// Mount the tape on a temporary mountpoint; unmount + cleanup deferred.
	mountPoint, err := os.MkdirTemp("", "*.ltfs")
	if err != nil {
		return nil, fmt.Errorf("create temp mountpoint, %w", err)
	}
	mountCmd := exec.CommandContext(a.ctx, a.exe.mountScript)
	mountCmd.Env = append(mountCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("MOUNT_POINT=%s", mountPoint))
	if err := runCmd(a.logger, mountCmd); err != nil {
		return nil, fmt.Errorf("run mount script fail, %w", err)
	}
	defer func() {
		umountCmd := exec.CommandContext(a.ctx, a.exe.umountScript)
		umountCmd.Env = append(umountCmd.Env, fmt.Sprintf("MOUNT_POINT=%s", mountPoint))
		if err := runCmd(a.logger, umountCmd); err != nil {
			a.logger.WithContext(a.ctx).WithError(err).Errorf("run umount script fail, %s", mountPoint)
			return
		}
		if err := os.Remove(mountPoint); err != nil {
			a.logger.WithContext(a.ctx).WithError(err).Errorf("remove mount point fail, %s", mountPoint)
			return
		}
	}()

	// Build the copy job: every source not already submitted to a tape.
	opts := make([]acp.Option, 0, 4)
	for _, source := range a.state.Sources {
		if source.Status == entity.CopyStatus_Submited {
			continue
		}
		opts = append(opts, acp.AccurateSource(source.Source.Base, source.Source.Path))
	}
	opts = append(opts, acp.Target(mountPoint))
	opts = append(opts, acp.WithHash(true))
	opts = append(opts, acp.SetToDevice(acp.LinearDevice(true)))
	opts = append(opts, acp.WithLogger(a.logger))

	reportHander, reportGetter := acp.NewReportGetter()
	opts = append(opts, acp.WithEventHandler(reportHander))
	// Event handler: mirror copy progress into the atomic counters and the
	// per-source status in job state.
	opts = append(opts, acp.WithEventHandler(func(ev acp.Event) {
		switch e := ev.(type) {
		case *acp.EventUpdateCount:
			atomic.StoreInt64(&a.progress.totalBytes, e.Bytes)
			atomic.StoreInt64(&a.progress.totalFiles, e.Files)
			return
		case *acp.EventUpdateProgress:
			atomic.StoreInt64(&a.progress.bytes, e.Bytes)
			atomic.StoreInt64(&a.progress.files, e.Files)
			return
		case *acp.EventUpdateJob:
			job := e.Job
			src := entity.NewSourceFromACPJob(job)
			var targetStatus entity.CopyStatus
			switch job.Status {
			case "pending":
				targetStatus = entity.CopyStatus_Pending
			case "preparing":
				a.logger.Infof("file '%s' starts to prepare for copy, size= %d", src.RealPath(), job.Size)
				targetStatus = entity.CopyStatus_Running
			case "finished":
				a.logger.Infof("file '%s' copy finished, size= %d", src.RealPath(), job.Size)
				targetStatus = entity.CopyStatus_Staged
			default:
				return
			}
			a.stateLock.Lock()
			defer a.stateLock.Unlock()
			// Sources are sorted, so binary search locates the source row.
			idx := sort.Search(len(a.state.Sources), func(idx int) bool {
				return src.Compare(a.state.Sources[idx].Source) <= 0
			})
			// NOTE(review): sort.Search can return len(Sources) when src sorts
			// after every element; indexing would then panic — confirm events
			// can only reference known sources.
			target := a.state.Sources[idx]
			if target == nil || !src.Equal(target.Source) {
				return
			}
			target.Status = targetStatus
			if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil {
				logrus.WithContext(a.ctx).Infof("save job for update file fail, name= %s", job.Base+path.Join(job.Path...))
			}
			return
		}
	}))

	copyer, err := acp.New(a.ctx, opts...)
	if err != nil {
		return nil, fmt.Errorf("start copy fail, %w", err)
	}
	copyer.Wait()

	// Collect successfully copied regular files, sorted by source path.
	report := reportGetter()
	sort.Slice(report.Jobs, func(i, j int) bool {
		return entity.NewSourceFromACPJob(report.Jobs[i]).Compare(entity.NewSourceFromACPJob(report.Jobs[j])) < 0
	})
	filteredJobs := make([]*acp.Job, 0, len(report.Jobs))
	files := make([]*library.TapeFile, 0, len(report.Jobs))
	for _, job := range report.Jobs {
		if len(job.SuccessTargets) == 0 {
			continue
		}
		if !job.Mode.IsRegular() {
			continue
		}
		hash, err := hex.DecodeString(job.SHA256)
		if err != nil {
			return nil, fmt.Errorf("decode sha256 fail, err= %w", err)
		}
		files = append(files, &library.TapeFile{
			Path: path.Join(job.Path...),
			Size: job.Size,
			Mode: job.Mode,
			ModTime: job.ModTime,
			WriteTime: job.WriteTime,
			Hash: hash,
		})
		filteredJobs = append(filteredJobs, job)
	}

	// Record the tape and its files in the library.
	tape, err := a.exe.lib.CreateTape(a.ctx, &library.Tape{
		Barcode: barcode,
		Name: name,
		Encryption: encryption,
		CreateTime: time.Now(),
	}, files)
	if err != nil {
		return nil, fmt.Errorf("create tape fail, barcode= '%s' name= '%s', %w", barcode, name, err)
	}
	if err := a.exe.lib.TrimFiles(a.ctx); err != nil {
		a.logger.WithError(err).Warnf("trim library files fail")
	}

	// Persist submission status; on failure, fall back to waiting for a tape
	// so the job is not stuck in Copying.
	if err := a.markSourcesAsSubmited(filteredJobs); err != nil {
		a.submit(&entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{WaitForTape: &entity.JobArchiveWaitForTapeParam{}}})
		return nil, err
	}
	return tape, nil
}
// switchStep transitions the archive job to (target, status), persisting the
// job. The transition is rejected when the current step is not in expect.
func (a *jobArchiveExecutor) switchStep(target entity.JobArchiveStep, status entity.JobStatus, expect mapset.Set[entity.JobArchiveStep]) error {
	a.stateLock.Lock()
	defer a.stateLock.Unlock()

	if !expect.Contains(a.state.Step) {
		return fmt.Errorf("unexpected current step, target= '%s' expect= '%s' has= '%s'", target, expect, a.state.Step)
	}

	a.state.Step = target
	a.job.Status = status
	if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil {
		// FIX: the old message hardcoded "step copying" regardless of target.
		return fmt.Errorf("switch to step '%s', save job fail, %w", target, err)
	}
	return nil
}
// markSourcesAsSubmited flags every source matched by jobs as submitted,
// persists the job, and resets the progress counters for the next tape.
// Both jobs and a.state.Sources are sorted by source path, so a single
// forward-moving window over Sources is enough to match them.
func (a *jobArchiveExecutor) markSourcesAsSubmited(jobs []*acp.Job) error {
	a.stateLock.Lock()
	defer a.stateLock.Unlock()

	searchableSource := a.state.Sources
	for _, job := range jobs {
		src := entity.NewSourceFromACPJob(job)
		for idx, testSrc := range searchableSource {
			if src.Compare(testSrc.Source) <= 0 {
				searchableSource = searchableSource[idx:]
				break
			}
		}
		// BUG FIX: guard before indexing — with an empty source list the old
		// code panicked with index out of range on searchableSource[0].
		if len(searchableSource) == 0 {
			break
		}
		target := searchableSource[0]
		if target == nil || !src.Equal(target.Source) {
			continue
		}
		target.Status = entity.CopyStatus_Submited
	}

	if _, err := a.exe.SaveJob(a.ctx, a.job); err != nil {
		return fmt.Errorf("mark sources as submited, save job, %w", err)
	}

	atomic.StoreInt64(&a.progress.bytes, 0)
	atomic.StoreInt64(&a.progress.files, 0)
	atomic.StoreInt64(&a.progress.totalBytes, 0)
	atomic.StoreInt64(&a.progress.totalFiles, 0)
	return nil
}
// getTodoSources counts sources that are still waiting to be written to tape.
func (a *jobArchiveExecutor) getTodoSources() int {
	a.stateLock.Lock()
	defer a.stateLock.Unlock()

	todo := 0
	for _, s := range a.state.Sources {
		if s.Status != entity.CopyStatus_Submited {
			todo++
		}
	}
	return todo
}
// makeTapeFinished advances the job after a tape write completes: wait for
// another tape when sources remain, otherwise finish the job.
func (a *jobArchiveExecutor) makeTapeFinished() {
	var param *entity.JobArchiveNextParam
	if a.getTodoSources() > 0 {
		param = &entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{WaitForTape: &entity.JobArchiveWaitForTapeParam{}}}
	} else {
		param = &entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_Finished{Finished: &entity.JobArchiveFinishedParam{}}}
	}
	a.submit(param)
}

View File

@@ -0,0 +1,74 @@
package executor
import (
"context"
"fmt"
"os"
"sort"
"github.com/abc950309/acp"
"github.com/abc950309/tapewriter/entity"
)
// initArchive expands the requested source roots into a flat, sorted,
// de-duplicated list of regular files and stores it as the job's initial
// archive state (step Pending).
func (e *Executor) initArchive(ctx context.Context, job *Job, param *entity.JobParamArchive) error {
	var err error
	sources := make([]*entity.SourceState, 0, len(param.Sources)*8)
	for _, src := range param.Sources {
		sources, err = walk(ctx, src, sources)
		if err != nil {
			return err
		}
	}
	// Sort by source path so later lookups can binary-search.
	sort.Slice(sources, func(i, j int) bool {
		return sources[i].Source.Compare(sources[j].Source) < 0
	})
	// Adjacent duplicates after sorting mean the same path was requested twice.
	for idx, src := range sources {
		if idx > 0 && sources[idx-1].Source.Equal(src.Source) {
			return fmt.Errorf("have multi file with same path, path= %s", src.Source.RealPath())
		}
	}
	job.State = &entity.JobState{State: &entity.JobState_Archive{Archive: &entity.JobStateArchive{
		Step: entity.JobArchiveStep_Pending,
		Sources: sources,
	}}}
	return nil
}
// walk recursively expands src: regular files are appended to sources
// (skipping macOS ".DS_Store" metadata), directories are descended into, and
// special files (matching acp.UnexpectFileMode) are silently ignored.
func walk(ctx context.Context, src *entity.Source, sources []*entity.SourceState) ([]*entity.SourceState, error) {
	path := src.RealPath()
	stat, err := os.Stat(path)
	if err != nil {
		return nil, fmt.Errorf("walk get stat, path= '%s', %w", path, err)
	}
	mode := stat.Mode()
	if mode.IsRegular() {
		if stat.Name() == ".DS_Store" {
			return sources, nil
		}
		return append(sources, &entity.SourceState{
			Source: src,
			Size: stat.Size(),
			Status: entity.CopyStatus_Pending,
		}), nil
	}
	// Non-regular, non-directory entries (devices, sockets, …) are skipped.
	if mode&acp.UnexpectFileMode != 0 {
		return sources, nil
	}
	files, err := os.ReadDir(path)
	if err != nil {
		return nil, fmt.Errorf("walk read dir, path= '%s', %w", path, err)
	}
	for _, file := range files {
		sources, err = walk(ctx, src.Append(file.Name()), sources)
		if err != nil {
			return nil, err
		}
	}
	return sources, nil
}

View File

@@ -0,0 +1,15 @@
package executor
import (
"context"
"github.com/abc950309/tapewriter/entity"
)
// startArchive kicks a freshly-started archive job into the WaitForTape step,
// where it stays until an operator supplies a tape.
func (e *Executor) startArchive(ctx context.Context, job *Job) error {
	return e.Submit(ctx, job, &entity.JobNextParam{Param: &entity.JobNextParam_Archive{
		Archive: &entity.JobArchiveNextParam{Param: &entity.JobArchiveNextParam_WaitForTape{
			WaitForTape: &entity.JobArchiveWaitForTapeParam{},
		}},
	}})
}

59
executor/job_restore.go Normal file
View File

@@ -0,0 +1,59 @@
package executor
import (
"context"
"fmt"
"os"
"os/exec"
"time"
"github.com/abc950309/tapewriter/library"
"github.com/sirupsen/logrus"
)
// RestoreLoadTape prepares a recorded tape for reading: it restores the
// tape's encryption key, runs the encrypt script to arm the drive, and
// mounts the tape on a temporary LTFS mountpoint.
//
// NOTE(review): the deferred releaseDevice fires when this function returns,
// while the tape stays mounted (the umount defer is commented out) — confirm
// the device is meant to be reusable while a restore mount is active.
func (e *Executor) RestoreLoadTape(ctx context.Context, device string, tape *library.Tape) error {
	if !e.occupyDevice(device) {
		return fmt.Errorf("device is using, device= %s", device)
	}
	defer e.releaseDevice(device)

	// Materialize the stored key into a temp file; removed shortly after the
	// scripts have had a chance to read it.
	keyPath, keyRecycle, err := e.restoreKey(tape.Encryption)
	if err != nil {
		return err
	}
	defer func() {
		time.Sleep(time.Second)
		keyRecycle()
	}()

	logger := logrus.StandardLogger()
	if err := runCmd(logger, e.makeEncryptCmd(ctx, device, keyPath, tape.Barcode, tape.Name)); err != nil {
		return fmt.Errorf("run encrypt script fail, %w", err)
	}

	mountPoint, err := os.MkdirTemp("", "*.ltfs")
	if err != nil {
		return fmt.Errorf("create temp mountpoint, %w", err)
	}
	mountCmd := exec.CommandContext(ctx, e.mountScript)
	mountCmd.Env = append(mountCmd.Env, fmt.Sprintf("DEVICE=%s", device), fmt.Sprintf("MOUNT_POINT=%s", mountPoint))
	if err := runCmd(logger, mountCmd); err != nil {
		return fmt.Errorf("run mount script fail, %w", err)
	}
	// Unmount is intentionally left to the operator for now.
	// defer func() {
	// umountCmd := exec.CommandContext(ctx, e.umountScript)
	// umountCmd.Env = append(umountCmd.Env, fmt.Sprintf("MOUNT_POINT=%s", mountPoint))
	// if err := runCmd(logger, umountCmd); err != nil {
	// logger.WithContext(ctx).WithError(err).Errorf("run umount script fail, %s", mountPoint)
	// return
	// }
	// if err := os.Remove(mountPoint); err != nil {
	// logger.WithContext(ctx).WithError(err).Errorf("remove mount point fail, %s", mountPoint)
	// return
	// }
	// }()
	return nil
}

54
executor/key.go Normal file
View File

@@ -0,0 +1,54 @@
package executor
import (
"context"
"crypto/rand"
"encoding/hex"
"fmt"
"os"
"os/exec"
"strings"
)
const (
keySize = 256
keyV1Header = "v1:"
)
// restoreKey writes the encryption key material of str into a temp key file
// and returns (path, recycle, error); recycle removes the file.
//
// Only the "v1:" format is recognized; its hex payload is written verbatim.
// NOTE(review): any other (or empty) str silently yields an EMPTY key file —
// confirm that is intended for tapes with no/unknown encryption header.
func (e *Executor) restoreKey(str string) (string, func(), error) {
	file, err := os.CreateTemp("", "*.key")
	if err != nil {
		return "", nil, fmt.Errorf("restore key, create temp, %w", err)
	}
	defer file.Close()
	if strings.HasPrefix(str, keyV1Header) {
		if _, err := file.WriteString(str[len(keyV1Header):]); err != nil {
			return "", nil, fmt.Errorf("restore key, write key, %w", err)
		}
	}
	return file.Name(), func() { os.Remove(file.Name()) }, nil
}
// newKey generates a fresh random 256-bit key, persists it into a temp key
// file, and returns (key, path, recycle, error); recycle removes the file.
// The returned key string carries the "v1:" format header.
func (e *Executor) newKey() (string, string, func(), error) {
	raw := make([]byte, keySize/8)
	if _, err := rand.Read(raw); err != nil {
		return "", "", nil, fmt.Errorf("gen key fail, %w", err)
	}

	key := keyV1Header + hex.EncodeToString(raw)
	path, recycle, err := e.restoreKey(key)
	if err != nil {
		return "", "", nil, err
	}
	return key, path, recycle, nil
}
// makeEncryptCmd builds the encrypt-script command, handing it the device,
// key file, and tape identity via environment variables.
func (e *Executor) makeEncryptCmd(ctx context.Context, device, keyPath, barcode, name string) *exec.Cmd {
	cmd := exec.CommandContext(ctx, e.encryptScript)
	cmd.Env = append(
		cmd.Env,
		fmt.Sprintf("DEVICE=%s", device),
		fmt.Sprintf("KEY_FILE=%s", keyPath),
		fmt.Sprintf("TAPE_BARCODE=%s", barcode),
		fmt.Sprintf("TAPE_NAME=%s", name),
	)
	return cmd
}

50
executor/log.go Normal file
View File

@@ -0,0 +1,50 @@
package executor
import (
"errors"
"fmt"
"os"
"os/exec"
"path"
"github.com/sirupsen/logrus"
)
// logPath returns the (directory, filename) pair of a job's log file, rooted
// at the executor's work directory.
func (e *Executor) logPath(jobID int64) (string, string) {
	return path.Join(e.workDirectory, "job-logs"), fmt.Sprintf("%d.log", jobID)
}
// newLogWriter opens (creating if needed) the job's log file in append mode,
// creating the log directory on first use. The caller owns closing the file.
func (e *Executor) newLogWriter(jobID int64) (*os.File, error) {
	dir, filename := e.logPath(jobID)
	if err := os.MkdirAll(dir, 0755); err != nil {
		return nil, fmt.Errorf("make job log dir fail, path= '%s', err= %w", dir, err)
	}
	file, err := os.OpenFile(path.Join(dir, filename), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		return nil, fmt.Errorf("create file fail, path= '%s', err= %w", path.Join(dir, filename), err)
	}
	return file, nil
}
// NewLogReader opens the job's log file for reading. It returns (nil, nil)
// when the job has produced no log yet; the caller owns closing the file.
func (e *Executor) NewLogReader(jobID int64) (*os.File, error) {
	dir, filename := e.logPath(jobID)
	file, err := os.Open(path.Join(dir, filename))
	if err != nil {
		if errors.Is(err, os.ErrNotExist) {
			return nil, nil
		}
		// BUG FIX: the old error was a bare "create file", losing both the
		// path and the underlying cause.
		return nil, fmt.Errorf("open log file fail, path= '%s', err= %w", path.Join(dir, filename), err)
	}
	return file, nil
}
// runCmd runs cmd with stdout and stderr piped into logger at info level,
// returning cmd.Run's error.
func runCmd(logger *logrus.Logger, cmd *exec.Cmd) error {
	writer := logger.WriterLevel(logrus.InfoLevel)
	// BUG FIX: WriterLevel returns an *io.PipeWriter serviced by a goroutine;
	// logrus documents that the caller must close it. Without the Close every
	// command leaked a pipe and its reader goroutine, and trailing output
	// could be lost.
	defer writer.Close()

	cmd.Stdout = writer
	cmd.Stderr = writer
	return cmd.Run()
}

8
executor/progress.go Normal file
View File

@@ -0,0 +1,8 @@
package executor
// progress holds copy-progress counters for a running archive job. All
// fields are read/written with sync/atomic from multiple goroutines.
type progress struct {
	// speed in bytes per second; presumably updated elsewhere — no writer is
	// visible in this file (NOTE: confirm).
	speed int64
	totalBytes, totalFiles int64
	bytes, files int64
}

11
external/external.go vendored Normal file
View File

@@ -0,0 +1,11 @@
package external
import "github.com/abc950309/tapewriter/library"
// External imports tape records produced outside this process (e.g. reports
// written by a standalone acp run) into the library.
type External struct {
	lib *library.Library
}

// New builds an External importer on top of lib.
func New(lib *library.Library) *External {
	return &External{lib: lib}
}

63
external/from_json.go vendored Normal file
View File

@@ -0,0 +1,63 @@
package external
import (
"context"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"path"
"github.com/abc950309/acp"
"github.com/abc950309/tapewriter/library"
)
// ImportACPReport decodes an acp JSON report from reader and records it as a
// tape (barcode/name/encryption) plus its successfully-copied regular files.
// The tape's CreateTime is taken from the first file's write time.
//
// NOTE(review): the parameter is named "barname" but is stored as Barcode —
// presumably a typo for "barcode"; confirm before renaming.
func (e *External) ImportACPReport(ctx context.Context, barname, name, encryption string, reader io.Reader) error {
	report := new(acp.Report)
	if err := json.NewDecoder(reader).Decode(report); err != nil {
		return err
	}

	// Keep only files that were actually written and are regular files.
	files := make([]*library.TapeFile, 0, 16)
	for _, f := range report.Jobs {
		if len(f.SuccessTargets) == 0 {
			continue
		}
		if !f.Mode.IsRegular() {
			continue
		}
		hash, err := hex.DecodeString(f.SHA256)
		if err != nil {
			return fmt.Errorf("decode sha256 fail, err= %w", err)
		}
		files = append(files, &library.TapeFile{
			Path: path.Join(f.Path...),
			Size: f.Size,
			Mode: f.Mode,
			ModTime: f.ModTime,
			WriteTime: f.WriteTime,
			Hash: hash,
		})
	}
	if len(files) == 0 {
		return fmt.Errorf("cannot found files from report")
	}

	if _, err := e.lib.CreateTape(ctx, &library.Tape{
		Barcode: barname,
		Name: name,
		Encryption: encryption,
		CreateTime: files[0].WriteTime,
	}, files); err != nil {
		return fmt.Errorf("save tape, err= %w", err)
	}
	// Drop file rows no longer referenced by any tape.
	if err := e.lib.TrimFiles(ctx); err != nil {
		return err
	}
	return nil
}

24
frontend/.gitignore vendored Normal file
View File

@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@@ -0,0 +1,3 @@
{
"printWidth": 160
}

16
frontend/index.html Normal file
View File

@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Tape Writer</title>
<script type="text/javascript">
window.apiBase = "%%API_BASE%%";
</script>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

46
frontend/package.json Normal file
View File

@@ -0,0 +1,46 @@
{
"name": "tape-manager",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"preview": "vite preview",
"gen-proto": "protoc --ts_out ./src/entity --proto_path ../entity/ `ls ../entity/*.proto` && ./src/entity/gen_index.sh"
},
"dependencies": {
"@emotion/react": "^11.10.4",
"@emotion/styled": "^11.10.4",
"@fortawesome/fontawesome-svg-core": "^1.2.32",
"@fortawesome/free-solid-svg-icons": "^5.13.1",
"@fortawesome/react-fontawesome": "^0.1.12",
"@mui/icons-material": "^5.10.16",
"@mui/material": "^5.10.9",
"@mui/styled-engine": "^5.10.8",
"@protobuf-ts/grpcweb-transport": "^2.8.2",
"@protobuf-ts/runtime": "^2.8.2",
"@protobuf-ts/runtime-rpc": "^2.8.2",
"chonky": "^2.3.2",
"chonky-icon-fontawesome": "^2.3.2",
"fast-text-encoding": "^1.0.6",
"filesize": "^10.0.5",
"moment": "^2.29.4",
"react": "^18.2.0",
"react-dnd": "^11.1.3",
"react-dnd-html5-backend": "^11.1.3",
"react-dom": "^18.2.0",
"react-is": "^18.2.0"
},
"devDependencies": {
"@protobuf-ts/plugin": "^2.8.2",
"@types/react": "^18.0.17",
"@types/react-dom": "^18.0.6",
"@vitejs/plugin-react": "^2.1.0",
"less": "^4.1.3",
"prettier": "2.7.1",
"tsdef": "^0.0.14",
"typescript": "^4.6.4",
"vite": "^3.1.0"
}
}

2495
frontend/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

1
frontend/public/vite.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

26
frontend/src/actions.ts Normal file
View File

@@ -0,0 +1,26 @@
import { FileData, FileArray, FileAction } from "chonky";
import { defineFileAction } from "chonky";
// Extra action state (as supplied by chonky) available to handlers of the
// rename action.
type RenameFileState = {
  contextMenuTriggerFile: FileData;
  instanceId: string;
  selectedFiles: FileArray;
  selectedFilesForAction: FileArray;
};
// Custom "Rename File" action: requires a selection and appears in both the
// toolbar and the context menu under the "Actions" group.
export const RenameFileAction = defineFileAction({
  id: "rename_file",
  requiresSelection: true,
  button: {
    name: "Rename File",
    toolbar: true,
    contextMenu: true,
    group: "Actions",
    icon: "edit",
  },
  // Associates the state shape above with this action for type inference.
  __extraStateType: {} as RenameFileState,
} as FileAction);
// Headless action used to ask a browser pane to re-fetch its listing.
export const RefreshListAction = defineFileAction({
  id: "refresh_list",
} as FileAction);

176
frontend/src/api.ts Normal file
View File

@@ -0,0 +1,176 @@
import { FileData } from "chonky";
import { GrpcWebFetchTransport } from "@protobuf-ts/grpcweb-transport";
import { ServiceClient, File, SourceFile } from "./entity";
import moment from "moment";
// Resolve the gRPC-web endpoint injected via index.html; fall back to the
// local development server when the deploy-time placeholder was never
// substituted.
const apiBase: string = (() => {
  const injected = (window as any).apiBase as string;
  const unset = !injected || injected === "%%API_BASE%%";
  return unset ? "http://127.0.0.1:8080/services" : injected;
})();
// High bit of Go's fs.FileMode (1 << 31): set when the entry is a directory.
export const ModeDir = 2147483648n; // d: is a directory
// Synthetic root node placed at the top of every folder chain.
export const Root: FileData = {
  id: "0",
  name: "Root",
  isDir: true,
  openable: true,
  selectable: true,
  draggable: true,
  droppable: true,
};
// Resolve after `ms` milliseconds; used to yield to the event loop between
// unmounting and remounting a browser pane.
export const sleep = (ms: number): Promise<null> => {
  return new Promise((resolve) => setTimeout(resolve, ms));
};
// Single shared gRPC-web transport; binary format matches the Go server.
const transport = new GrpcWebFetchTransport({
  baseUrl: apiBase,
  format: "binary",
});
export const cli = new ServiceClient(transport);
// Exposed for ad-hoc debugging from the browser console.
(window as any).cli = cli;
// Map library File records onto chonky FileData entries. Library files are
// always openable; only directories accept drops.
export function convertFiles(files: Array<File>): FileData[] {
  const out: FileData[] = [];
  for (const file of files) {
    const dir = (file.mode & ModeDir) > 0;
    out.push({
      id: getID(file),
      name: file.name,
      ext: extname(file.name),
      isDir: dir,
      isHidden: file.name.startsWith("."),
      openable: true,
      selectable: true,
      draggable: true,
      droppable: dir,
      size: Number(file.size),
      modDate: moment.unix(Number(file.modTime)).toDate(),
    });
  }
  return out;
}
// Map backup SourceFile records onto chonky FileData entries. Unlike library
// files, plain source files are not openable — only directories are.
export function convertSourceFiles(files: Array<SourceFile>): FileData[] {
  const out: FileData[] = [];
  for (const file of files) {
    const dir = (file.mode & ModeDir) > 0;
    out.push({
      id: getID(file),
      name: file.name,
      ext: extname(file.name),
      isDir: dir,
      isHidden: file.name.startsWith("."),
      openable: dir,
      selectable: true,
      draggable: true,
      droppable: dir,
      size: Number(file.size),
      modDate: moment.unix(Number(file.modTime)).toDate(),
    });
  }
  return out;
}
// Return the filename's extension including the leading dot, or "" when there
// is none. A dot at position 0 (dotfiles such as ".gitignore") is part of the
// name, not an extension — matching Node's path.extname semantics. The
// previous `idx < 0` check returned the whole name for dotfiles.
function extname(filename: string): string {
  const idx = filename.lastIndexOf(".");
  if (idx <= 0) {
    return "";
  }
  return filename.slice(idx);
}
// Library files are addressed by their numeric id, source files by path.
function getID(file: File | SourceFile): string {
  return "id" in file ? String(file.id) : file.path;
}
// export interface GetFileResponse {
// file: File;
// positions: Position[];
// children: FileArray<File>;
// }
// export const getFile = async (id: string) => {
// const result = await fetch(`${Domain}/api/v1/file/${id}`);
// const body: GetFileResponse = await result.json();
// return body;
// };
// export interface ListFileParentsResponse {
// parents: FileArray<File>;
// }
// export const listFileParents = async (id: string) => {
// const result = await fetch(`${Domain}/api/v1/file/${id}/_parent`);
// const body: ListFileParentsResponse = await result.json();
// return [Root, ...body.parents];
// };
// export interface SetFileResponse {
// file?: File;
// result?: string;
// }
// export const editFile = async (id: string, payload: Partial<File>) => {
// const result = await fetch(`${Domain}/api/v1/file/${id}`, {
// method: "POST",
// headers: {
// "Content-Type": "application/json",
// },
// body: JSON.stringify(payload),
// });
// const body: SetFileResponse = await result.json();
// return body;
// };
// export const createFolder = async (
// parentID: string,
// payload: Partial<File>
// ) => {
// const result = await fetch(`${Domain}/api/v1/file/${parentID}/`, {
// method: "PUT",
// headers: {
// "Content-Type": "application/json",
// },
// body: JSON.stringify(payload),
// });
// const body: SetFileResponse = await result.json();
// return body.file;
// };
// export const deleteFolder = async (ids: string[]) => {
// const result = await fetch(`${Domain}/api/v1/file/`, {
// method: "DELETE",
// headers: {
// "Content-Type": "application/json",
// },
// body: JSON.stringify({ fileids: ids }),
// });
// const body: SetFileResponse = await result.json();
// return body;
// };
// interface GetTapeResponse {
// tape: Tape;
// }
// export const getTape = async (id: number) => {
// const result = await fetch(`${Domain}/api/v1/tape/${id}`);
// const body: GetTapeResponse = await result.json();
// return body;
// };
// interface GetSourceResponse {
// file: File;
// chain: File[];
// children: FileArray<File>;
// }
// export const getSource = async (path: string) => {
// const result = await fetch(`${Domain}/api/v1/source/${path}`);
// const body: GetSourceResponse = await result.json();
// return body;
// };

63
frontend/src/app.less Normal file
View File

@@ -0,0 +1,63 @@
/* Top-level flex column: the tab strip plus the active browser pane. */
#app {
  height: 100%;
  width: 100%;
  margin: 0;
  text-align: center;
  box-sizing: border-box;
  display: -webkit-flex; /* Safari */
  display: flex;
  flex-direction: column;
  .tabs {
    background-color: #ffffff;
  }
}
/* Grey backdrop hosting one or two side-by-side chonky browsers. */
.browser-box {
  background-color: #efefef;
  padding: 0.5em;
  height: 100%;
  box-sizing: border-box;
  .browser-container {
    margin: 0;
    box-sizing: border-box;
    height: 100%;
    .browser {
      box-sizing: border-box;
      padding-right: 0.5em;
      &:last-child {
        padding-right: 0;
      }
      /* Card shown per job in the Jobs tab. */
      .job-detail {
        .app-MuiGrid-item {
          padding-top: 1em;
          padding-left: 1em;
        }
        margin-bottom: 0.5em;
        &:last-child {
          margin-bottom: 0;
        }
      }
    }
  }
}
/* Full-height dialog used to display raw job logs; wraps long lines. */
.view-log-dialog {
  .app-MuiDialog-paperScrollPaper {
    height: 100%;
  }
  pre {
    white-space: pre-wrap; /* Since CSS 2.1 */
    white-space: -moz-pre-wrap; /* Mozilla, since 1999 */
    white-space: -pre-wrap; /* Opera 4-6 */
    white-space: -o-pre-wrap; /* Opera 7 */
    word-wrap: break-word; /* Internet Explorer 5.5+ */
  }
}

76
frontend/src/app.tsx Normal file
View File

@@ -0,0 +1,76 @@
import { useEffect } from "react";
import { useState, useCallback } from "react";
import { ChangeEvent } from "react";
import Tabs from "@mui/material/Tabs";
import Tab from "@mui/material/Tab";
import { createTheme, ThemeProvider, styled } from "@mui/material/styles";
import { FileBrowser, FileBrowserType } from "./file";
import { BackupBrowser, BackupType } from "./backup";
import { JobsBrowser, JobsType } from "./jobs";
import "./app.less";
import { sleep } from "./api";
import { Nullable } from "tsdef";
// import reactLogo from './assets/react.svg'
// <img src={reactLogo} className="logo react" alt="React logo" />
const theme = createTheme({});
// Instantiate the browser component backing the given tab identifier;
// unknown identifiers yield no content.
const typeToElement = (type: string) => {
  if (type === FileBrowserType) {
    return <FileBrowser />;
  }
  if (type === BackupType) {
    return <BackupBrowser />;
  }
  if (type === JobsType) {
    return <JobsBrowser />;
  }
  return null;
};
// Root component: a tab strip selecting between the File, Source (backup),
// and Jobs browsers.
const App = () => {
  const [tabValue, setTabValue] = useState(FileBrowserType);
  const [inner, setInner] = useState<Nullable<JSX.Element>>(null);
  // Switch tabs in two steps: unmount the current pane first, then mount the
  // new one on the next tick so chonky re-initialises cleanly.
  const setType = useCallback(
    (newValue: string) => {
      (async () => {
        setTabValue(newValue);
        setInner(null);
        await sleep(0);
        setInner(typeToElement(newValue));
      })();
    },
    [setTabValue, setInner]
  );
  const handleTabChange = useCallback(
    (_: ChangeEvent<{}>, newValue: string) => {
      setType(newValue);
    },
    // fix: depend on setType — the value this closure actually uses — not
    // setTabValue, which previously left the callback referencing a stale
    // closure if setType ever changed identity.
    [setType]
  );
  useEffect(() => {
    // setType is stable (its deps are React state setters), so running this
    // only on mount is intentional.
    setType(FileBrowserType);
  }, []);
  return (
    <div id="app">
      <ThemeProvider theme={theme}>
        <Tabs className="tabs" value={tabValue} onChange={handleTabChange} indicatorColor="secondary">
          <Tab label="File" value={FileBrowserType} />
          <Tab label="Source" value={BackupType} />
          <Tab label="Jobs" value={JobsType} />
        </Tabs>
      </ThemeProvider>
      {inner}
    </div>
  );
};
export default App;

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

After

Width:  |  Height:  |  Size: 4.0 KiB

327
frontend/src/backup.tsx Normal file
View File

@@ -0,0 +1,327 @@
import { useState, useEffect, useMemo, useCallback, FC } from "react";
import Grid from "@mui/material/Grid";
import Box from "@mui/material/Box";
import { FullFileBrowser, FileBrowser, FileNavbar, FileToolbar, FileList, FileContextMenu, FileArray } from "chonky";
import { ChonkyActions, ChonkyFileActionData } from "chonky";
import { DndProvider as UntypedDndProvider, useDrop, DndProviderProps } from "react-dnd";
import { HTML5Backend } from "react-dnd-html5-backend";
import "./app.less";
import { cli, convertSourceFiles } from "./api";
import { Root } from "./api";
import { RenameFileAction, RefreshListAction } from "./actions";
import { useDetailModal, DetailModal, Detail } from "./detail";
const DndProvider = UntypedDndProvider as FC<DndProviderProps<any, any> & { children: JSX.Element[] }>;
// Browser state for the backup *source* pane: lists server-side source files
// via cli.sourceList and lets the user drill into directories. Much of the
// action handling (move/rename/create/delete/refresh) is scaffolding carried
// over from the File tab and is still commented out.
const useBackupSourceBrowser = () =>
  // openDetailModel: (detail: Detail) => void
  {
    // Start with a single null entry so chonky renders a loading placeholder.
    const [files, setFiles] = useState<FileArray>(Array(1).fill(null));
    const [folderChain, setFolderChan] = useState<FileArray>([Root]);
    // const currentID = useMemo(() => {
    //   if (folderChain.length === 0) {
    //     return "0";
    //   }
    //   const last = folderChain.slice(-1)[0];
    //   if (!last) {
    //     return "0";
    //   }
    //   return last.id;
    // }, [folderChain]);
    // Fetch a directory listing and replace both the file list and the
    // breadcrumb chain.
    const openFolder = useCallback((path: string) => {
      (async () => {
        const result = await cli.sourceList({ path }).response;
        console.log("source list", {
          path,
          result,
          converted: convertSourceFiles(result.children),
        });
        setFiles(convertSourceFiles(result.children));
        setFolderChan(convertSourceFiles(result.chain));
      })();
    }, []);
    // Load the source root once on mount.
    useEffect(() => openFolder(""), []);
    const onFileAction = useCallback(
      (data: ChonkyFileActionData) => {
        // console.log(data);
        switch (data.id) {
          case ChonkyActions.OpenFiles.id:
            (async () => {
              const { targetFile, files } = data.payload;
              const fileToOpen = targetFile ?? files[0];
              if (!fileToOpen) {
                return;
              }
              if (fileToOpen.isDir) {
                // Source entries are addressed by path, so the id doubles as
                // the path argument for the next listing.
                await openFolder(fileToOpen.id);
                return;
              }
              // const file = await getFile(fileToOpen.id);
              // await openDetailModel(file);
            })();
            return;
          // case ChonkyActions.MoveFiles.id:
          //   (async () => {
          //     const { destination, files } = data.payload;
          //     for (const file of files) {
          //       await editFile(file.id, { parentid: destination.id });
          //     }
          //     await refreshAll();
          //   })();
          //   return;
          // case RenameFileAction.id:
          //   (async () => {
          //     const files = data.state.selectedFilesForAction;
          //     if (files.length === 0) {
          //       return;
          //     }
          //     const file = files[0];
          //     const name = prompt("Provide new name for this file:", file.name);
          //     if (!name) {
          //       return;
          //     }
          //     await editFile(file.id, { name });
          //     await refreshAll();
          //   })();
          //   return;
          // case ChonkyActions.CreateFolder.id:
          //   (async () => {
          //     const name = prompt("Provide the name for your new folder:");
          //     if (!name) {
          //       return;
          //     }
          //     await createFolder(currentID, { name });
          //     await refreshAll();
          //   })();
          //   return;
          // case ChonkyActions.DeleteFiles.id:
          //   (async () => {
          //     const files = data.state.selectedFilesForAction;
          //     const fileids = files.map((file) => file.id);
          //     await deleteFolder(fileids);
          //     await refreshAll();
          //   })();
          //   return;
          // case RefreshListAction.id:
          //   openFolder(currentID);
          //   return;
        }
      },
      [openFolder]
    );
    const fileActions = useMemo(() => [ChonkyActions.StartDragNDrop, RefreshListAction], []);
    // Props spread directly onto a chonky FileBrowser.
    return {
      files,
      folderChain,
      onFileAction,
      fileActions,
      defaultFileViewActionId: ChonkyActions.EnableListView.id,
      doubleClickDelay: 300,
    };
  };
// Browser state for the backup *target* pane. Mirrors useBackupSourceBrowser,
// but relabels the chain root as "BackupSource" for display. The large block
// of commented-out move/rename/create/delete scaffolding duplicated from the
// source hook has been dropped; see useBackupSourceBrowser for that history.
const useBackupTargetBrowser = () => {
  // Start with a single null entry so chonky renders a loading placeholder.
  const [files, setFiles] = useState<FileArray>(Array(1).fill(null));
  const [folderChain, setFolderChain] = useState<FileArray>([Root]);
  // Fetch a directory listing and replace both the file list and the
  // breadcrumb chain.
  const openFolder = useCallback((path: string) => {
    (async () => {
      const result = await cli.sourceList({ path }).response;
      // fix: guard against an empty chain before relabelling the root node;
      // previously `result.chain[0].name = ...` could throw on an empty reply.
      if (result.chain.length > 0) {
        result.chain[0].name = "BackupSource";
      }
      setFiles(convertSourceFiles(result.children));
      setFolderChain(convertSourceFiles(result.chain));
    })();
  }, []);
  // Load the root listing once on mount.
  useEffect(() => openFolder(""), []);
  const onFileAction = useCallback(
    (data: ChonkyFileActionData) => {
      switch (data.id) {
        case ChonkyActions.OpenFiles.id:
          (async () => {
            const { targetFile, files } = data.payload;
            const fileToOpen = targetFile ?? files[0];
            if (!fileToOpen) {
              return;
            }
            if (fileToOpen.isDir) {
              // Source entries are addressed by path; the id doubles as the
              // path argument for the next listing.
              await openFolder(fileToOpen.id);
              return;
            }
          })();
          return;
      }
    },
    [openFolder]
  );
  const fileActions = useMemo(() => [ChonkyActions.StartDragNDrop, RefreshListAction], []);
  // Props spread directly onto a chonky FileBrowser.
  return {
    files,
    folderChain,
    onFileAction,
    fileActions,
    defaultFileViewActionId: ChonkyActions.EnableListView.id,
    doubleClickDelay: 300,
  };
};
// const CustomDropZone = () => {
// const [maybeImpostor, setMaybeImpostor] = useState<string | null>(null);
// const [{ isOver, canDrop }, drop] = useDrop({
// accept: ChonkyDndFileEntryType,
// drop: (item: ChonkyDndFileEntryItem) => {
// setMaybeImpostor(item.payload.draggedFile.name);
// console.log("DnD payload:", item.payload);
// },
// // canDrop: (item: ChonkyDndFileEntryItem) => !item.payload.draggedFile.isDir,
// canDrop: (item: ChonkyDndFileEntryItem) => true,
// collect: (monitor) => ({
// isOver: monitor.isOver(),
// canDrop: monitor.canDrop(),
// }),
// });
// return (
// <div
// ref={drop}
// style={{
// boxShadow: "inset rgba(0, 0, 0, 0.6) 0 100px 0",
// backgroundImage: "url(./shadow-realm.gif)",
// lineHeight: "100px",
// textAlign: "center",
// fontSize: "1.4em",
// marginBottom: 20,
// borderRadius: 4,
// color: "#fff",
// height: 100,
// }}
// >
// {isOver
// ? canDrop
// ? "C'mon, drop 'em!"
// : "Folders are not allowed!"
// : maybeImpostor
// ? `${maybeImpostor} was not the impostor.`
// : "Drag & drop a (Chonky) file here"}
// </div>
// );
// };
// Tab identifier for the backup view.
export const BackupType = "backup";
// Two-pane backup layout: source listing on the left, target on the right.
export const BackupBrowser = () => {
  const sourceProps = useBackupSourceBrowser();
  const targetProps = useBackupTargetBrowser();
  return (
    <Box className="browser-box">
      <Grid className="browser-container" container>
        <Grid className="browser" item xs={6}>
          {/* <CustomDropZone /> */}
          <FullFileBrowser {...sourceProps} />
        </Grid>
        <Grid className="browser" item xs={6}>
          {/* Assembled from parts (rather than FullFileBrowser) so sections
              can be customised independently later. */}
          <FileBrowser {...targetProps}>
            <FileNavbar />
            <FileToolbar />
            <FileList />
            <FileContextMenu />
          </FileBrowser>
        </Grid>
      </Grid>
    </Box>
  );
};

23
frontend/src/detail.less Normal file
View File

@@ -0,0 +1,23 @@
/* Per-position sections inside the file-detail dialog; paths may be long,
   so pre/p content wraps instead of overflowing. */
.detail-content {
  .position {
    .app-MuiDialogContent-dividers {
      border-bottom: none;
    }
    .app-MuiGrid-item {
      width: 100%;
      padding: 0.5em;
      p, pre {
        margin: 0.2em 0;
        white-space: pre-wrap; /* Since CSS 2.1 */
        white-space: -moz-pre-wrap; /* Mozilla, since 1999 */
        white-space: -pre-wrap; /* Opera 4-6 */
        white-space: -o-pre-wrap; /* Opera 7 */
        word-wrap: break-word; /* Internet Explorer 5.5+ */
      }
    }
  }
}

186
frontend/src/detail.tsx Normal file
View File

@@ -0,0 +1,186 @@
import { Nullable } from "tsdef";
import Dialog, { DialogProps } from "@mui/material/Dialog";
import DialogContent from "@mui/material/DialogContent";
import DialogTitle from "@mui/material/DialogTitle";
import { Grid } from "@mui/material";
import moment from "moment";
import { useState, useCallback } from "react";
import "./app.less";
import { cli } from "./api";
import { formatFilesize } from "./tools";
import "./detail.less";
import { FileGetReply, Tape } from "./entity";
// FileGetReply augmented with the tapes referenced by its positions,
// keyed by tape id.
export type Detail = FileGetReply & {
  tapes: Map<bigint, Tape>;
};
// Manage the file-detail dialog: opening resolves all tapes referenced by
// the file's positions with a single tapeMGet call, then stores the merged
// Detail; closing clears it.
export const useDetailModal = () => {
  const [detail, setDetail] = useState<Nullable<Detail>>(null);
  const openDetailModel = useCallback(
    (reply: FileGetReply) => {
      (async () => {
        const ids = reply.positions.map((posi) => posi.tapeId);
        const tapeList = await cli.tapeMGet({ ids }).response;
        const tapes = new Map<bigint, Tape>();
        tapeList.tapes.forEach((tape) => tapes.set(tape.id, tape));
        setDetail({ ...reply, tapes });
      })();
    },
    [setDetail]
  );
  const closeDetailModel = useCallback(() => setDetail(null), [setDetail]);
  return { detail, closeDetailModel, openDetailModel };
};
// Dialog showing, for every tape position of a file, the owning tape's
// metadata plus the position's path, permission, times, and size.
// Renders nothing when no detail is loaded.
export const DetailModal = (props: Omit<DialogProps, "open" | "children"> & { detail: Nullable<Detail> }) => {
  const { detail, ...otherProps } = props;
  if (!detail) {
    return null;
  }
  return (
    <Dialog className="detail-content" open={!!detail} scroll="body" {...otherProps}>
      <DialogTitle id="scroll-dialog-title">{detail.file?.name}</DialogTitle>
      <div className="position">
        {detail.positions.map((posi) => {
          // Skip positions whose tape was not returned by tapeMGet.
          const tape = detail.tapes?.get(posi.tapeId);
          if (!tape) {
            return null;
          }
          return (
            <DialogContent dividers={true} key={`${posi.id}`}>
              <Grid container spacing={1}>
                <Grid item xs={4}>
                  <p>
                    <b>Tape ID</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{tape?.barcode}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Tape Name</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{tape?.name}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Tape Create Time</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{tape?.createTime ? moment.unix(Number(tape.createTime)).format() : "--"}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Tape Destroy Time</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  {/* fix: destroyTime is a unix timestamp in seconds, like
                      createTime — previously formatted as milliseconds via
                      moment(Number(...)). */}
                  <p>{tape?.destroyTime ? moment.unix(Number(tape.destroyTime)).format() : "--"}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Tape Capacity</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{formatFilesize(tape?.capacityBytes)}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    {/* fix: user-facing typo "Writen" */}
                    <b>Tape Written</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{formatFilesize(tape?.writenBytes)}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Path</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <pre>{posi.path}</pre>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Permission</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  {/* Low nine mode bits rendered as octal, e.g. "644". */}
                  <p>{(Number(posi.mode) & 0o777).toString(8)}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Modify Time</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{moment.unix(Number(posi.modTime)).format()}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Write Time</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{moment.unix(Number(posi.writeTime)).format()}</p>
                </Grid>
                <Grid item xs={4}>
                  <p>
                    <b>Size</b>
                  </p>
                </Grid>
                <Grid item xs={8}>
                  <p>{formatFilesize(posi.size)}</p>
                </Grid>
              </Grid>
            </DialogContent>
          );
        })}
      </div>
    </Dialog>
  );
};

View File

@@ -0,0 +1,34 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "copy_status.proto" (package "copy_status", syntax proto3)
// tslint:disable
/**
* @generated from protobuf enum copy_status.CopyStatus
*/
export enum CopyStatus {
/**
* @generated from protobuf enum value: Draft = 0;
*/
Draft = 0,
/**
* waiting in queue
*
* @generated from protobuf enum value: Pending = 1;
*/
Pending = 1,
/**
* @generated from protobuf enum value: Running = 2;
*/
Running = 2,
/**
* @generated from protobuf enum value: Staged = 3;
*/
Staged = 3,
/**
* @generated from protobuf enum value: Submited = 4;
*/
Submited = 4,
/**
* @generated from protobuf enum value: Failed = 255;
*/
Failed = 255
}

202
frontend/src/entity/file.ts Normal file
View File

@@ -0,0 +1,202 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "file.proto" (package "file", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message file.File
*/
export interface File {
/**
* @generated from protobuf field: int64 id = 1;
*/
id: bigint;
/**
* @generated from protobuf field: int64 parent_id = 2;
*/
parentId: bigint;
/**
* @generated from protobuf field: string name = 3;
*/
name: string;
/**
* @generated from protobuf field: int64 mode = 17;
*/
mode: bigint;
/**
* @generated from protobuf field: int64 mod_time = 18;
*/
modTime: bigint;
/**
* @generated from protobuf field: int64 size = 19;
*/
size: bigint;
/**
* @generated from protobuf field: bytes hash = 20;
*/
hash: Uint8Array;
}
/**
* @generated from protobuf message file.EditedFile
*/
export interface EditedFile {
/**
* @generated from protobuf field: optional int64 parent_id = 2;
*/
parentId?: bigint;
/**
* @generated from protobuf field: optional string name = 3;
*/
name?: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class File$Type extends MessageType<File> {
constructor() {
super("file.File", [
{ no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 2, name: "parent_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 19, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 20, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
create(value?: PartialMessage<File>): File {
const message = { id: 0n, parentId: 0n, name: "", mode: 0n, modTime: 0n, size: 0n, hash: new Uint8Array(0) };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<File>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: File): File {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 id */ 1:
message.id = reader.int64().toBigInt();
break;
case /* int64 parent_id */ 2:
message.parentId = reader.int64().toBigInt();
break;
case /* string name */ 3:
message.name = reader.string();
break;
case /* int64 mode */ 17:
message.mode = reader.int64().toBigInt();
break;
case /* int64 mod_time */ 18:
message.modTime = reader.int64().toBigInt();
break;
case /* int64 size */ 19:
message.size = reader.int64().toBigInt();
break;
case /* bytes hash */ 20:
message.hash = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: File, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 id = 1; */
if (message.id !== 0n)
writer.tag(1, WireType.Varint).int64(message.id);
/* int64 parent_id = 2; */
if (message.parentId !== 0n)
writer.tag(2, WireType.Varint).int64(message.parentId);
/* string name = 3; */
if (message.name !== "")
writer.tag(3, WireType.LengthDelimited).string(message.name);
/* int64 mode = 17; */
if (message.mode !== 0n)
writer.tag(17, WireType.Varint).int64(message.mode);
/* int64 mod_time = 18; */
if (message.modTime !== 0n)
writer.tag(18, WireType.Varint).int64(message.modTime);
/* int64 size = 19; */
if (message.size !== 0n)
writer.tag(19, WireType.Varint).int64(message.size);
/* bytes hash = 20; */
if (message.hash.length)
writer.tag(20, WireType.LengthDelimited).bytes(message.hash);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message file.File
*/
export const File = new File$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts 2.8.2 from file.proto.
// Regenerate from the .proto instead of hand-editing.
// Presumably a partial-edit payload for file.File (both fields optional) —
// confirm against file.proto and the server-side handler.
class EditedFile$Type extends MessageType<EditedFile> {
    constructor() {
        super("file.EditedFile", [
            { no: 2, name: "parent_id", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 3, name: "name", kind: "scalar", opt: true, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a new message; optional fields stay absent (undefined) by default.
    create(value?: PartialMessage<EditedFile>): EditedFile {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<EditedFile>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of wire format into `target` (or a fresh message);
    // unknown fields are skipped/recorded per options.readUnknownField.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: EditedFile): EditedFile {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* optional int64 parent_id */ 2:
                    message.parentId = reader.int64().toBigInt();
                    break;
                case /* optional string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes only fields that are set (non-undefined); re-emits retained
    // unknown fields unless options.writeUnknownFields is false.
    internalBinaryWrite(message: EditedFile, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* optional int64 parent_id = 2; */
        if (message.parentId !== undefined)
            writer.tag(2, WireType.Varint).int64(message.parentId);
        /* optional string name = 3; */
        if (message.name !== undefined)
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message file.EditedFile
 */
export const EditedFile = new EditedFile$Type();

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Regenerates index.ts as a barrel file re-exporting every sibling .ts module.
#
# Fixes over the previous version:
#   - all path expansions quoted, so paths containing spaces work;
#   - `: > index.ts` truncates without the spurious leading blank line that
#     `echo '' > index.ts` produced;
#   - iterates the glob directly instead of parsing `ls | grep -v index.ts`
#     (the substring grep would also wrongly exclude names like reindex.ts).
set -e
# Resolve the directory containing this script so it works from any CWD.
CURDIR=$(cd "$(dirname "$0")" && pwd)
cd "$CURDIR"
# Truncate (or create) the output file.
: > index.ts
for file in *.ts; do
    # Skip the barrel file itself.
    if [ "$file" = "index.ts" ]; then
        continue
    fi
    # ${file%.ts} strips the extension for the module specifier.
    echo "export * from \"./${file%.ts}\";" >> index.ts
done

View File

@@ -0,0 +1,11 @@
// NOTE(review): generated barrel file — rebuilt by the sibling generate
// script, so manual additions here will be overwritten.
export * from "./copy_status";
export * from "./file";
export * from "./job";
export * from "./job_archive";
export * from "./job_restore";
export * from "./position";
export * from "./service.client";
export * from "./service";
export * from "./source";
export * from "./tape";

574
frontend/src/entity/job.ts Normal file
View File

@@ -0,0 +1,574 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "job.proto" (package "job", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { JobDisplayArchive } from "./job_archive";
import { JobArchiveNextParam } from "./job_archive";
import { JobStateArchive } from "./job_archive";
import { JobParamArchive } from "./job_archive";
// NOTE(review): the declarations below are machine-generated by protobuf-ts
// 2.8.2 from job.proto — regenerate rather than hand-editing. int64 fields
// surface as bigint (LongType.BIGINT).
/**
 * @generated from protobuf message job.Job
 */
export interface Job {
    /**
     * @generated from protobuf field: int64 id = 1;
     */
    id: bigint;
    /**
     * @generated from protobuf field: job.JobStatus status = 2;
     */
    status: JobStatus;
    /**
     * @generated from protobuf field: int64 priority = 3;
     */
    priority: bigint;
    /**
     * NOTE(review): epoch timestamp; unit (seconds vs millis) is not visible
     * here — confirm against the server-side converters.
     *
     * @generated from protobuf field: int64 create_time = 4;
     */
    createTime: bigint;
    /**
     * @generated from protobuf field: int64 update_time = 5;
     */
    updateTime: bigint;
    /**
     * @generated from protobuf field: job.JobState state = 17;
     */
    state?: JobState;
}
/**
 * Oneof wrapper: exactly one variant (currently only "archive") is set.
 *
 * @generated from protobuf message job.JobParam
 */
export interface JobParam {
    /**
     * @generated from protobuf oneof: param
     */
    param: {
        oneofKind: "archive";
        /**
         * @generated from protobuf field: job_archive.JobParamArchive Archive = 1 [json_name = "Archive"];
         */
        archive: JobParamArchive;
    } | {
        oneofKind: undefined;
    };
}
/**
 * Oneof wrapper for per-job-type state (currently only "archive").
 *
 * @generated from protobuf message job.JobState
 */
export interface JobState {
    /**
     * @generated from protobuf oneof: state
     */
    state: {
        oneofKind: "archive";
        /**
         * @generated from protobuf field: job_archive.JobStateArchive Archive = 1 [json_name = "Archive"];
         */
        archive: JobStateArchive;
    } | {
        oneofKind: undefined;
    };
}
/**
 * @generated from protobuf message job.JobNextParam
 */
export interface JobNextParam {
    /**
     * @generated from protobuf oneof: param
     */
    param: {
        oneofKind: "archive";
        /**
         * @generated from protobuf field: job_archive.JobArchiveNextParam archive = 1;
         */
        archive: JobArchiveNextParam;
    } | {
        oneofKind: undefined;
    };
}
/**
 * @generated from protobuf message job.CreatableJob
 */
export interface CreatableJob {
    /**
     * @generated from protobuf field: int64 priority = 3;
     */
    priority: bigint;
    /**
     * @generated from protobuf field: job.JobParam param = 17;
     */
    param?: JobParam;
}
/**
 * Filter for job listing; all fields optional (absent = no constraint).
 *
 * @generated from protobuf message job.JobFilter
 */
export interface JobFilter {
    /**
     * @generated from protobuf field: optional job.JobStatus status = 1;
     */
    status?: JobStatus;
    /**
     * @generated from protobuf field: optional int64 limit = 33;
     */
    limit?: bigint;
    /**
     * @generated from protobuf field: optional int64 offset = 34;
     */
    offset?: bigint;
}
/**
 * @generated from protobuf message job.JobDisplay
 */
export interface JobDisplay {
    /**
     * @generated from protobuf oneof: display
     */
    display: {
        oneofKind: "archive";
        /**
         * @generated from protobuf field: job_archive.JobDisplayArchive archive = 1;
         */
        archive: JobDisplayArchive;
    } | {
        oneofKind: undefined;
    };
}
/**
 * Job lifecycle states. Note the deliberate gap: Failed = 255 is a sentinel
 * terminal value, not the next ordinal.
 *
 * @generated from protobuf enum job.JobStatus
 */
export enum JobStatus {
    /**
     * @generated from protobuf enum value: Draft = 0;
     */
    Draft = 0,
    /**
     * dependencies not satisfied
     *
     * @generated from protobuf enum value: NotReady = 1;
     */
    NotReady = 1,
    /**
     * waiting in queue
     *
     * @generated from protobuf enum value: Pending = 2;
     */
    Pending = 2,
    /**
     * @generated from protobuf enum value: Processing = 3;
     */
    Processing = 3,
    /**
     * @generated from protobuf enum value: Completed = 4;
     */
    Completed = 4,
    /**
     * @generated from protobuf enum value: Failed = 255;
     */
    Failed = 255
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing.
class Job$Type extends MessageType<Job> {
    constructor() {
        super("job.Job", [
            { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "status", kind: "enum", T: () => ["job.JobStatus", JobStatus] },
            { no: 3, name: "priority", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 4, name: "create_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 5, name: "update_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 17, name: "state", kind: "message", T: () => JobState }
        ]);
    }
    // Builds a message with proto3 zero values; `state` stays undefined.
    create(value?: PartialMessage<Job>): Job {
        const message = { id: 0n, status: 0, priority: 0n, createTime: 0n, updateTime: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Job>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Job): Job {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 id */ 1:
                    message.id = reader.int64().toBigInt();
                    break;
                case /* job.JobStatus status */ 2:
                    message.status = reader.int32();
                    break;
                case /* int64 priority */ 3:
                    message.priority = reader.int64().toBigInt();
                    break;
                case /* int64 create_time */ 4:
                    message.createTime = reader.int64().toBigInt();
                    break;
                case /* int64 update_time */ 5:
                    message.updateTime = reader.int64().toBigInt();
                    break;
                case /* job.JobState state */ 17:
                    // Sub-message is length-delimited: read its byte length first.
                    message.state = JobState.internalBinaryRead(reader, reader.uint32(), options, message.state);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes non-default fields (proto3 semantics: zero values are omitted).
    internalBinaryWrite(message: Job, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 id = 1; */
        if (message.id !== 0n)
            writer.tag(1, WireType.Varint).int64(message.id);
        /* job.JobStatus status = 2; */
        if (message.status !== 0)
            writer.tag(2, WireType.Varint).int32(message.status);
        /* int64 priority = 3; */
        if (message.priority !== 0n)
            writer.tag(3, WireType.Varint).int64(message.priority);
        /* int64 create_time = 4; */
        if (message.createTime !== 0n)
            writer.tag(4, WireType.Varint).int64(message.createTime);
        /* int64 update_time = 5; */
        if (message.updateTime !== 0n)
            writer.tag(5, WireType.Varint).int64(message.updateTime);
        /* job.JobState state = 17; */
        if (message.state)
            JobState.internalBinaryWrite(message.state, writer.tag(17, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.Job
 */
export const Job = new Job$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. Single-variant oneof around JobParamArchive.
class JobParam$Type extends MessageType<JobParam> {
    constructor() {
        super("job.JobParam", [
            { no: 1, name: "Archive", kind: "message", jsonName: "Archive", oneof: "param", T: () => JobParamArchive }
        ]);
    }
    // New message starts with no oneof variant selected.
    create(value?: PartialMessage<JobParam>): JobParam {
        const message = { param: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobParam>(this, message, value);
        return message;
    }
    // Decodes wire format; selecting the "archive" variant replaces the oneof.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobParam): JobParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobParamArchive Archive = 1 [json_name = "Archive"];*/ 1:
                    message.param = {
                        oneofKind: "archive",
                        archive: JobParamArchive.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).archive)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the selected oneof variant, if any.
    internalBinaryWrite(message: JobParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobParamArchive Archive = 1 [json_name = "Archive"]; */
        if (message.param.oneofKind === "archive")
            JobParamArchive.internalBinaryWrite(message.param.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.JobParam
 */
export const JobParam = new JobParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. Mirrors JobParam$Type for the "state" oneof.
class JobState$Type extends MessageType<JobState> {
    constructor() {
        super("job.JobState", [
            { no: 1, name: "Archive", kind: "message", jsonName: "Archive", oneof: "state", T: () => JobStateArchive }
        ]);
    }
    // New message starts with no oneof variant selected.
    create(value?: PartialMessage<JobState>): JobState {
        const message = { state: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobState>(this, message, value);
        return message;
    }
    // Decodes wire format; selecting the "archive" variant replaces the oneof.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobState): JobState {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobStateArchive Archive = 1 [json_name = "Archive"];*/ 1:
                    message.state = {
                        oneofKind: "archive",
                        archive: JobStateArchive.internalBinaryRead(reader, reader.uint32(), options, (message.state as any).archive)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the selected oneof variant, if any.
    internalBinaryWrite(message: JobState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobStateArchive Archive = 1 [json_name = "Archive"]; */
        if (message.state.oneofKind === "archive")
            JobStateArchive.internalBinaryWrite(message.state.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.JobState
 */
export const JobState = new JobState$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. Oneof wrapper around JobArchiveNextParam.
class JobNextParam$Type extends MessageType<JobNextParam> {
    constructor() {
        super("job.JobNextParam", [
            { no: 1, name: "archive", kind: "message", oneof: "param", T: () => JobArchiveNextParam }
        ]);
    }
    // New message starts with no oneof variant selected.
    create(value?: PartialMessage<JobNextParam>): JobNextParam {
        const message = { param: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobNextParam>(this, message, value);
        return message;
    }
    // Decodes wire format; selecting the "archive" variant replaces the oneof.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobNextParam): JobNextParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobArchiveNextParam archive */ 1:
                    message.param = {
                        oneofKind: "archive",
                        archive: JobArchiveNextParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).archive)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the selected oneof variant, if any.
    internalBinaryWrite(message: JobNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobArchiveNextParam archive = 1; */
        if (message.param.oneofKind === "archive")
            JobArchiveNextParam.internalBinaryWrite(message.param.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.JobNextParam
 */
export const JobNextParam = new JobNextParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. Client-supplied payload for creating a job.
class CreatableJob$Type extends MessageType<CreatableJob> {
    constructor() {
        super("job.CreatableJob", [
            { no: 3, name: "priority", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 17, name: "param", kind: "message", T: () => JobParam }
        ]);
    }
    // Builds a message with proto3 zero values; `param` stays undefined.
    create(value?: PartialMessage<CreatableJob>): CreatableJob {
        const message = { priority: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<CreatableJob>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreatableJob): CreatableJob {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 priority */ 3:
                    message.priority = reader.int64().toBigInt();
                    break;
                case /* job.JobParam param */ 17:
                    message.param = JobParam.internalBinaryRead(reader, reader.uint32(), options, message.param);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes non-default fields (proto3 semantics: zero values are omitted).
    internalBinaryWrite(message: CreatableJob, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 priority = 3; */
        if (message.priority !== 0n)
            writer.tag(3, WireType.Varint).int64(message.priority);
        /* job.JobParam param = 17; */
        if (message.param)
            JobParam.internalBinaryWrite(message.param, writer.tag(17, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.CreatableJob
 */
export const CreatableJob = new CreatableJob$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. All fields are `optional`, so presence (not zero
// value) drives both decode and encode.
class JobFilter$Type extends MessageType<JobFilter> {
    constructor() {
        super("job.JobFilter", [
            { no: 1, name: "status", kind: "enum", opt: true, T: () => ["job.JobStatus", JobStatus] },
            { no: 33, name: "limit", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 34, name: "offset", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    // Builds a message; optional fields stay absent (undefined) by default.
    create(value?: PartialMessage<JobFilter>): JobFilter {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobFilter>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobFilter): JobFilter {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* optional job.JobStatus status */ 1:
                    message.status = reader.int32();
                    break;
                case /* optional int64 limit */ 33:
                    message.limit = reader.int64().toBigInt();
                    break;
                case /* optional int64 offset */ 34:
                    message.offset = reader.int64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes only fields that are present (non-undefined).
    internalBinaryWrite(message: JobFilter, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* optional job.JobStatus status = 1; */
        if (message.status !== undefined)
            writer.tag(1, WireType.Varint).int32(message.status);
        /* optional int64 limit = 33; */
        if (message.limit !== undefined)
            writer.tag(33, WireType.Varint).int64(message.limit);
        /* optional int64 offset = 34; */
        if (message.offset !== undefined)
            writer.tag(34, WireType.Varint).int64(message.offset);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.JobFilter
 */
export const JobFilter = new JobFilter$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from job.proto
// rather than hand-editing. Oneof wrapper around JobDisplayArchive.
class JobDisplay$Type extends MessageType<JobDisplay> {
    constructor() {
        super("job.JobDisplay", [
            { no: 1, name: "archive", kind: "message", oneof: "display", T: () => JobDisplayArchive }
        ]);
    }
    // New message starts with no oneof variant selected.
    create(value?: PartialMessage<JobDisplay>): JobDisplay {
        const message = { display: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobDisplay>(this, message, value);
        return message;
    }
    // Decodes wire format; selecting the "archive" variant replaces the oneof.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplay): JobDisplay {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobDisplayArchive archive */ 1:
                    message.display = {
                        oneofKind: "archive",
                        archive: JobDisplayArchive.internalBinaryRead(reader, reader.uint32(), options, (message.display as any).archive)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes the selected oneof variant, if any.
    internalBinaryWrite(message: JobDisplay, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobDisplayArchive archive = 1; */
        if (message.display.oneofKind === "archive")
            JobDisplayArchive.internalBinaryWrite(message.display.archive, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job.JobDisplay
 */
export const JobDisplay = new JobDisplay$Type();

View File

@@ -0,0 +1,498 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "job_archive.proto" (package "job_archive", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { SourceState } from "./source";
import { Source } from "./source";
// NOTE(review): the declarations below are machine-generated by protobuf-ts
// 2.8.2 from job_archive.proto — regenerate rather than hand-editing.
/**
 * @generated from protobuf message job_archive.JobParamArchive
 */
export interface JobParamArchive {
    /**
     * @generated from protobuf field: repeated source.Source sources = 1;
     */
    sources: Source[];
}
/**
 * Oneof: the next step to drive an archive job into (wait-for-tape,
 * copying, or finished).
 *
 * @generated from protobuf message job_archive.JobArchiveNextParam
 */
export interface JobArchiveNextParam {
    /**
     * @generated from protobuf oneof: param
     */
    param: {
        oneofKind: "waitForTape";
        /**
         * @generated from protobuf field: job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];
         */
        waitForTape: JobArchiveWaitForTapeParam;
    } | {
        oneofKind: "copying";
        /**
         * @generated from protobuf field: job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"];
         */
        copying: JobArchiveCopyingParam;
    } | {
        oneofKind: "finished";
        /**
         * @generated from protobuf field: job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"];
         */
        finished: JobArchiveFinishedParam;
    } | {
        oneofKind: undefined;
    };
}
/**
 * Empty marker message: no payload needed for the wait-for-tape step.
 *
 * @generated from protobuf message job_archive.JobArchiveWaitForTapeParam
 */
export interface JobArchiveWaitForTapeParam {
}
/**
 * @generated from protobuf message job_archive.JobArchiveCopyingParam
 */
export interface JobArchiveCopyingParam {
    /**
     * @generated from protobuf field: string device = 1;
     */
    device: string;
    /**
     * @generated from protobuf field: string barcode = 2;
     */
    barcode: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
}
/**
 * Empty marker message: no payload needed for the finished step.
 *
 * @generated from protobuf message job_archive.JobArchiveFinishedParam
 */
export interface JobArchiveFinishedParam {
}
/**
 * @generated from protobuf message job_archive.JobStateArchive
 */
export interface JobStateArchive {
    /**
     * @generated from protobuf field: job_archive.JobArchiveStep step = 1;
     */
    step: JobArchiveStep;
    /**
     * @generated from protobuf field: repeated source.SourceState sources = 2;
     */
    sources: SourceState[];
}
/**
 * Progress snapshot for an archive job.
 *
 * NOTE(review): the "copyed*" spelling (sic, should be "copied") originates
 * in job_archive.proto; renaming here would break wire/JSON compatibility —
 * fix it in the .proto (with a rename migration) if ever.
 *
 * @generated from protobuf message job_archive.JobDisplayArchive
 */
export interface JobDisplayArchive {
    /**
     * @generated from protobuf field: int64 copyedBytes = 1;
     */
    copyedBytes: bigint;
    /**
     * @generated from protobuf field: int64 copyedFiles = 2;
     */
    copyedFiles: bigint;
    /**
     * @generated from protobuf field: int64 totalBytes = 3;
     */
    totalBytes: bigint;
    /**
     * @generated from protobuf field: int64 totalFiles = 4;
     */
    totalFiles: bigint;
    /**
     * NOTE(review): presumably bytes/second; unit not visible here — confirm
     * against the server.
     *
     * @generated from protobuf field: optional int64 speed = 5;
     */
    speed?: bigint;
}
/**
 * Steps of an archive job. Finished = 255 is a sentinel terminal value.
 *
 * @generated from protobuf enum job_archive.JobArchiveStep
 */
export enum JobArchiveStep {
    /**
     * @generated from protobuf enum value: Pending = 0;
     */
    Pending = 0,
    /**
     * @generated from protobuf enum value: WaitForTape = 1;
     */
    WaitForTape = 1,
    /**
     * @generated from protobuf enum value: Copying = 2;
     */
    Copying = 2,
    /**
     * @generated from protobuf enum value: Finished = 255;
     */
    Finished = 255
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from
// job_archive.proto rather than hand-editing.
class JobParamArchive$Type extends MessageType<JobParamArchive> {
    constructor() {
        super("job_archive.JobParamArchive", [
            { no: 1, name: "sources", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Source }
        ]);
    }
    // Builds a message with an empty `sources` list.
    create(value?: PartialMessage<JobParamArchive>): JobParamArchive {
        const message = { sources: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobParamArchive>(this, message, value);
        return message;
    }
    // Decodes wire format; each `sources` occurrence appends one element.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobParamArchive): JobParamArchive {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated source.Source sources */ 1:
                    message.sources.push(Source.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes each source as its own length-delimited field occurrence.
    internalBinaryWrite(message: JobParamArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated source.Source sources = 1; */
        for (let i = 0; i < message.sources.length; i++)
            Source.internalBinaryWrite(message.sources[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobParamArchive
 */
export const JobParamArchive = new JobParamArchive$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from
// job_archive.proto rather than hand-editing. Three-variant oneof; field
// number 255 matches the Finished sentinel convention used elsewhere.
class JobArchiveNextParam$Type extends MessageType<JobArchiveNextParam> {
    constructor() {
        super("job_archive.JobArchiveNextParam", [
            { no: 1, name: "WaitForTape", kind: "message", jsonName: "WaitForTape", oneof: "param", T: () => JobArchiveWaitForTapeParam },
            { no: 2, name: "Copying", kind: "message", jsonName: "Copying", oneof: "param", T: () => JobArchiveCopyingParam },
            { no: 255, name: "Finished", kind: "message", jsonName: "Finished", oneof: "param", T: () => JobArchiveFinishedParam }
        ]);
    }
    // New message starts with no oneof variant selected.
    create(value?: PartialMessage<JobArchiveNextParam>): JobArchiveNextParam {
        const message = { param: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobArchiveNextParam>(this, message, value);
        return message;
    }
    // Decodes wire format; the last variant seen on the wire wins the oneof.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveNextParam): JobArchiveNextParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];*/ 1:
                    message.param = {
                        oneofKind: "waitForTape",
                        waitForTape: JobArchiveWaitForTapeParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).waitForTape)
                    };
                    break;
                case /* job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"];*/ 2:
                    message.param = {
                        oneofKind: "copying",
                        copying: JobArchiveCopyingParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).copying)
                    };
                    break;
                case /* job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"];*/ 255:
                    message.param = {
                        oneofKind: "finished",
                        finished: JobArchiveFinishedParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).finished)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes whichever oneof variant is selected, if any.
    internalBinaryWrite(message: JobArchiveNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobArchiveWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; */
        if (message.param.oneofKind === "waitForTape")
            JobArchiveWaitForTapeParam.internalBinaryWrite(message.param.waitForTape, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        /* job_archive.JobArchiveCopyingParam Copying = 2 [json_name = "Copying"]; */
        if (message.param.oneofKind === "copying")
            JobArchiveCopyingParam.internalBinaryWrite(message.param.copying, writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        /* job_archive.JobArchiveFinishedParam Finished = 255 [json_name = "Finished"]; */
        if (message.param.oneofKind === "finished")
            JobArchiveFinishedParam.internalBinaryWrite(message.param.finished, writer.tag(255, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobArchiveNextParam
 */
export const JobArchiveNextParam = new JobArchiveNextParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from
// job_archive.proto rather than hand-editing. Empty message: no fields to
// read or write; only unknown fields are handled.
class JobArchiveWaitForTapeParam$Type extends MessageType<JobArchiveWaitForTapeParam> {
    constructor() {
        super("job_archive.JobArchiveWaitForTapeParam", []);
    }
    create(value?: PartialMessage<JobArchiveWaitForTapeParam>): JobArchiveWaitForTapeParam {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobArchiveWaitForTapeParam>(this, message, value);
        return message;
    }
    // No declared fields: any payload bytes are ignored.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveWaitForTapeParam): JobArchiveWaitForTapeParam {
        return target ?? this.create();
    }
    // Nothing to encode beyond retained unknown fields.
    internalBinaryWrite(message: JobArchiveWaitForTapeParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobArchiveWaitForTapeParam
 */
export const JobArchiveWaitForTapeParam = new JobArchiveWaitForTapeParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): machine-generated by protobuf-ts — regenerate from
// job_archive.proto rather than hand-editing. Carries the tape device path,
// barcode, and name for the copying step (semantics defined server-side).
class JobArchiveCopyingParam$Type extends MessageType<JobArchiveCopyingParam> {
    constructor() {
        super("job_archive.JobArchiveCopyingParam", [
            { no: 1, name: "device", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "barcode", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a message with proto3 zero values (empty strings).
    create(value?: PartialMessage<JobArchiveCopyingParam>): JobArchiveCopyingParam {
        const message = { device: "", barcode: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobArchiveCopyingParam>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveCopyingParam): JobArchiveCopyingParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string device */ 1:
                    message.device = reader.string();
                    break;
                case /* string barcode */ 2:
                    message.barcode = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes non-default fields (empty strings are omitted, proto3 semantics).
    internalBinaryWrite(message: JobArchiveCopyingParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string device = 1; */
        if (message.device !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.device);
        /* string barcode = 2; */
        if (message.barcode !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.barcode);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobArchiveCopyingParam
 */
export const JobArchiveCopyingParam = new JobArchiveCopyingParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_archive.proto instead.
// Empty message: JobArchiveFinishedParam declares no fields, so read/write only deal with unknown fields.
class JobArchiveFinishedParam$Type extends MessageType<JobArchiveFinishedParam> {
    constructor() {
        // Empty field-descriptor list: the message has no declared fields.
        super("job_archive.JobArchiveFinishedParam", []);
    }
    // Builds a new (empty) instance; merges a partial value via reflection when given.
    create(value?: PartialMessage<JobArchiveFinishedParam>): JobArchiveFinishedParam {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobArchiveFinishedParam>(this, message, value);
        return message;
    }
    // Nothing to decode for an empty message; reuse the target if supplied.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobArchiveFinishedParam): JobArchiveFinishedParam {
        return target ?? this.create();
    }
    // Emits only retained unknown fields, unless writeUnknownFields is disabled.
    internalBinaryWrite(message: JobArchiveFinishedParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobArchiveFinishedParam
 */
export const JobArchiveFinishedParam = new JobArchiveFinishedParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_archive.proto instead.
class JobStateArchive$Type extends MessageType<JobStateArchive> {
    constructor() {
        // step is an enum (JobArchiveStep); sources is a repeated SourceState sub-message.
        super("job_archive.JobStateArchive", [
            { no: 1, name: "step", kind: "enum", T: () => ["job_archive.JobArchiveStep", JobArchiveStep] },
            { no: 2, name: "sources", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => SourceState }
        ]);
    }
    // Builds an instance with defaults: step = 0 (Pending in the enum) and an empty sources list.
    create(value?: PartialMessage<JobStateArchive>): JobStateArchive {
        const message = { step: 0, sources: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobStateArchive>(this, message, value);
        return message;
    }
    // Decodes fields until the end of the length-delimited region; each sources entry is a
    // nested length-prefixed SourceState message appended to the list.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobStateArchive): JobStateArchive {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_archive.JobArchiveStep step */ 1:
                    message.step = reader.int32();
                    break;
                case /* repeated source.SourceState sources */ 2:
                    message.sources.push(SourceState.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes step only when non-default (non-zero); each sources element is framed with
    // fork()/join() to produce its length prefix.
    internalBinaryWrite(message: JobStateArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_archive.JobArchiveStep step = 1; */
        if (message.step !== 0)
            writer.tag(1, WireType.Varint).int32(message.step);
        /* repeated source.SourceState sources = 2; */
        for (let i = 0; i < message.sources.length; i++)
            SourceState.internalBinaryWrite(message.sources[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobStateArchive
 */
export const JobStateArchive = new JobStateArchive$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_archive.proto instead.
class JobDisplayArchive$Type extends MessageType<JobDisplayArchive> {
    constructor() {
        // int64 fields use LongType.BIGINT, so they surface as JS bigint; "speed" (5) is optional.
        super("job_archive.JobDisplayArchive", [
            { no: 1, name: "copyedBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "copyedFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 3, name: "totalBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 4, name: "totalFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 5, name: "speed", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    // Builds an instance with bigint-zero defaults; the optional "speed" field stays undefined.
    create(value?: PartialMessage<JobDisplayArchive>): JobDisplayArchive {
        const message = { copyedBytes: 0n, copyedFiles: 0n, totalBytes: 0n, totalFiles: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobDisplayArchive>(this, message, value);
        return message;
    }
    // Decodes varint int64 fields into bigints until the end of the length-delimited region.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayArchive): JobDisplayArchive {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 copyedBytes */ 1:
                    message.copyedBytes = reader.int64().toBigInt();
                    break;
                case /* int64 copyedFiles */ 2:
                    message.copyedFiles = reader.int64().toBigInt();
                    break;
                case /* int64 totalBytes */ 3:
                    message.totalBytes = reader.int64().toBigInt();
                    break;
                case /* int64 totalFiles */ 4:
                    message.totalFiles = reader.int64().toBigInt();
                    break;
                case /* optional int64 speed */ 5:
                    message.speed = reader.int64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes non-default values only; optional "speed" is written whenever it is set (even 0n).
    internalBinaryWrite(message: JobDisplayArchive, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 copyedBytes = 1; */
        if (message.copyedBytes !== 0n)
            writer.tag(1, WireType.Varint).int64(message.copyedBytes);
        /* int64 copyedFiles = 2; */
        if (message.copyedFiles !== 0n)
            writer.tag(2, WireType.Varint).int64(message.copyedFiles);
        /* int64 totalBytes = 3; */
        if (message.totalBytes !== 0n)
            writer.tag(3, WireType.Varint).int64(message.totalBytes);
        /* int64 totalFiles = 4; */
        if (message.totalFiles !== 0n)
            writer.tag(4, WireType.Varint).int64(message.totalFiles);
        /* optional int64 speed = 5; */
        if (message.speed !== undefined)
            writer.tag(5, WireType.Varint).int64(message.speed);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_archive.JobDisplayArchive
 */
export const JobDisplayArchive = new JobDisplayArchive$Type();

View File

@@ -0,0 +1,583 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "job_restore.proto" (package "job_restore", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CopyStatus } from "./copy_status";
// NOTE(review): generated interface/enum declarations for job_restore.proto — do not hand-edit;
// regenerate from the .proto instead. int64 fields are mapped to JS bigint by the generator.
/**
 * @generated from protobuf message job_restore.JobParamRestore
 */
export interface JobParamRestore {
    /**
     * @generated from protobuf field: repeated int64 file_ids = 1;
     */
    fileIds: bigint[];
}
/**
 * Oneof wrapper: exactly one of waitForTape / copying / finished is set,
 * discriminated by `param.oneofKind`.
 *
 * @generated from protobuf message job_restore.JobRestoreNextParam
 */
export interface JobRestoreNextParam {
    /**
     * @generated from protobuf oneof: param
     */
    param: {
        oneofKind: "waitForTape";
        /**
         * @generated from protobuf field: job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];
         */
        waitForTape: JobRestoreWaitForTapeParam;
    } | {
        oneofKind: "copying";
        /**
         * @generated from protobuf field: job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"];
         */
        copying: JobRestoreCopyingParam;
    } | {
        oneofKind: "finished";
        /**
         * @generated from protobuf field: job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"];
         */
        finished: JobRestoreFinishedParam;
    } | {
        oneofKind: undefined;
    };
}
/**
 * Empty message (no fields).
 *
 * @generated from protobuf message job_restore.JobRestoreWaitForTapeParam
 */
export interface JobRestoreWaitForTapeParam {
}
/**
 * @generated from protobuf message job_restore.JobRestoreCopyingParam
 */
export interface JobRestoreCopyingParam {
    /**
     * @generated from protobuf field: string device = 1;
     */
    device: string;
}
/**
 * Empty message (no fields).
 *
 * @generated from protobuf message job_restore.JobRestoreFinishedParam
 */
export interface JobRestoreFinishedParam {
}
/**
 * @generated from protobuf message job_restore.FileRestoreState
 */
export interface FileRestoreState {
    /**
     * @generated from protobuf field: int64 file_id = 1;
     */
    fileId: bigint;
    /**
     * @generated from protobuf field: copy_status.CopyStatus status = 2;
     */
    status: CopyStatus;
    /**
     * @generated from protobuf field: int64 tape_id = 17;
     */
    tapeId: bigint;
    /**
     * @generated from protobuf field: int64 position_id = 18;
     */
    positionId: bigint;
    /**
     * @generated from protobuf field: repeated string path_in_tape = 19;
     */
    pathInTape: string[];
}
/**
 * @generated from protobuf message job_restore.JobStateRestore
 */
export interface JobStateRestore {
    /**
     * @generated from protobuf field: job_restore.JobRestoreStep step = 1;
     */
    step: JobRestoreStep;
    /**
     * @generated from protobuf field: repeated job_restore.FileRestoreState files = 2;
     */
    files: FileRestoreState[];
}
/**
 * @generated from protobuf message job_restore.JobDisplayRestore
 */
export interface JobDisplayRestore {
    /**
     * @generated from protobuf field: int64 copyedBytes = 1;
     */
    copyedBytes: bigint;
    /**
     * @generated from protobuf field: int64 copyedFiles = 2;
     */
    copyedFiles: bigint;
    /**
     * @generated from protobuf field: int64 totalBytes = 3;
     */
    totalBytes: bigint;
    /**
     * @generated from protobuf field: int64 totalFiles = 4;
     */
    totalFiles: bigint;
    /**
     * @generated from protobuf field: bytes logs = 17;
     */
    logs: Uint8Array;
}
/**
 * Job lifecycle steps. Note Finished = 255 matches the Finished oneof field
 * number (255) in JobRestoreNextParam above.
 *
 * @generated from protobuf enum job_restore.JobRestoreStep
 */
export enum JobRestoreStep {
    /**
     * @generated from protobuf enum value: Pending = 0;
     */
    Pending = 0,
    /**
     * @generated from protobuf enum value: WaitForTape = 1;
     */
    WaitForTape = 1,
    /**
     * @generated from protobuf enum value: Copying = 2;
     */
    Copying = 2,
    /**
     * @generated from protobuf enum value: Finished = 255;
     */
    Finished = 255
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class JobParamRestore$Type extends MessageType<JobParamRestore> {
    constructor() {
        // Single repeated int64 field, packed on the wire, surfaced as bigint[].
        super("job_restore.JobParamRestore", [
            { no: 1, name: "file_ids", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    // Builds an instance with an empty fileIds list; merges a partial value when given.
    create(value?: PartialMessage<JobParamRestore>): JobParamRestore {
        const message = { fileIds: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobParamRestore>(this, message, value);
        return message;
    }
    // Accepts both encodings of the repeated field: a packed (length-delimited) run of
    // varints, or individual unpacked varint entries.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobParamRestore): JobParamRestore {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated int64 file_ids */ 1:
                    if (wireType === WireType.LengthDelimited)
                        for (let e = reader.int32() + reader.pos; reader.pos < e;)
                            message.fileIds.push(reader.int64().toBigInt());
                    else
                        message.fileIds.push(reader.int64().toBigInt());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Always writes the packed encoding: one length-delimited tag, then each id as a varint.
    internalBinaryWrite(message: JobParamRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated int64 file_ids = 1; */
        if (message.fileIds.length) {
            writer.tag(1, WireType.LengthDelimited).fork();
            for (let i = 0; i < message.fileIds.length; i++)
                writer.int64(message.fileIds[i]);
            writer.join();
        }
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobParamRestore
 */
export const JobParamRestore = new JobParamRestore$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class JobRestoreNextParam$Type extends MessageType<JobRestoreNextParam> {
    constructor() {
        // All three message fields belong to the "param" oneof; field 255 is Finished.
        super("job_restore.JobRestoreNextParam", [
            { no: 1, name: "WaitForTape", kind: "message", jsonName: "WaitForTape", oneof: "param", T: () => JobRestoreWaitForTapeParam },
            { no: 2, name: "Copying", kind: "message", jsonName: "Copying", oneof: "param", T: () => JobRestoreCopyingParam },
            { no: 255, name: "Finished", kind: "message", jsonName: "Finished", oneof: "param", T: () => JobRestoreFinishedParam }
        ]);
    }
    // Builds an instance with the oneof unset (oneofKind: undefined).
    create(value?: PartialMessage<JobRestoreNextParam>): JobRestoreNextParam {
        const message = { param: { oneofKind: undefined } };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobRestoreNextParam>(this, message, value);
        return message;
    }
    // Each decoded oneof member replaces message.param wholesale; the existing member of the
    // same kind (if any) is passed as the merge target of the nested read.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreNextParam): JobRestoreNextParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"];*/ 1:
                    message.param = {
                        oneofKind: "waitForTape",
                        waitForTape: JobRestoreWaitForTapeParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).waitForTape)
                    };
                    break;
                case /* job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"];*/ 2:
                    message.param = {
                        oneofKind: "copying",
                        copying: JobRestoreCopyingParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).copying)
                    };
                    break;
                case /* job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"];*/ 255:
                    message.param = {
                        oneofKind: "finished",
                        finished: JobRestoreFinishedParam.internalBinaryRead(reader, reader.uint32(), options, (message.param as any).finished)
                    };
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes at most one branch, selected by param.oneofKind; each is a length-delimited sub-message.
    internalBinaryWrite(message: JobRestoreNextParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_restore.JobRestoreWaitForTapeParam WaitForTape = 1 [json_name = "WaitForTape"]; */
        if (message.param.oneofKind === "waitForTape")
            JobRestoreWaitForTapeParam.internalBinaryWrite(message.param.waitForTape, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        /* job_restore.JobRestoreCopyingParam Copying = 2 [json_name = "Copying"]; */
        if (message.param.oneofKind === "copying")
            JobRestoreCopyingParam.internalBinaryWrite(message.param.copying, writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        /* job_restore.JobRestoreFinishedParam Finished = 255 [json_name = "Finished"]; */
        if (message.param.oneofKind === "finished")
            JobRestoreFinishedParam.internalBinaryWrite(message.param.finished, writer.tag(255, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobRestoreNextParam
 */
export const JobRestoreNextParam = new JobRestoreNextParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
// Empty message: JobRestoreWaitForTapeParam declares no fields, so read/write only deal with unknown fields.
class JobRestoreWaitForTapeParam$Type extends MessageType<JobRestoreWaitForTapeParam> {
    constructor() {
        // Empty field-descriptor list: the message has no declared fields.
        super("job_restore.JobRestoreWaitForTapeParam", []);
    }
    // Builds a new (empty) instance; merges a partial value via reflection when given.
    create(value?: PartialMessage<JobRestoreWaitForTapeParam>): JobRestoreWaitForTapeParam {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobRestoreWaitForTapeParam>(this, message, value);
        return message;
    }
    // Nothing to decode for an empty message; reuse the target if supplied.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreWaitForTapeParam): JobRestoreWaitForTapeParam {
        return target ?? this.create();
    }
    // Emits only retained unknown fields, unless writeUnknownFields is disabled.
    internalBinaryWrite(message: JobRestoreWaitForTapeParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobRestoreWaitForTapeParam
 */
export const JobRestoreWaitForTapeParam = new JobRestoreWaitForTapeParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class JobRestoreCopyingParam$Type extends MessageType<JobRestoreCopyingParam> {
    constructor() {
        // Single string field: the tape device path.
        super("job_restore.JobRestoreCopyingParam", [
            { no: 1, name: "device", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds an instance with the proto3 default (empty string); merges a partial value when given.
    create(value?: PartialMessage<JobRestoreCopyingParam>): JobRestoreCopyingParam {
        const message = { device: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobRestoreCopyingParam>(this, message, value);
        return message;
    }
    // Decodes fields by tag number until the end of the length-delimited region.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreCopyingParam): JobRestoreCopyingParam {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string device */ 1:
                    message.device = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes device only when it differs from the proto3 default (empty string).
    internalBinaryWrite(message: JobRestoreCopyingParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string device = 1; */
        if (message.device !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.device);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobRestoreCopyingParam
 */
export const JobRestoreCopyingParam = new JobRestoreCopyingParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
// Empty message: JobRestoreFinishedParam declares no fields, so read/write only deal with unknown fields.
class JobRestoreFinishedParam$Type extends MessageType<JobRestoreFinishedParam> {
    constructor() {
        // Empty field-descriptor list: the message has no declared fields.
        super("job_restore.JobRestoreFinishedParam", []);
    }
    // Builds a new (empty) instance; merges a partial value via reflection when given.
    create(value?: PartialMessage<JobRestoreFinishedParam>): JobRestoreFinishedParam {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobRestoreFinishedParam>(this, message, value);
        return message;
    }
    // Nothing to decode for an empty message; reuse the target if supplied.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRestoreFinishedParam): JobRestoreFinishedParam {
        return target ?? this.create();
    }
    // Emits only retained unknown fields, unless writeUnknownFields is disabled.
    internalBinaryWrite(message: JobRestoreFinishedParam, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobRestoreFinishedParam
 */
export const JobRestoreFinishedParam = new JobRestoreFinishedParam$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class FileRestoreState$Type extends MessageType<FileRestoreState> {
    constructor() {
        // int64 fields use LongType.BIGINT (JS bigint); path_in_tape is an unpacked repeated string.
        super("job_restore.FileRestoreState", [
            { no: 1, name: "file_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "status", kind: "enum", T: () => ["copy_status.CopyStatus", CopyStatus] },
            { no: 17, name: "tape_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 18, name: "position_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 19, name: "path_in_tape", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds an instance with proto3 defaults: bigint zeros, enum 0, empty path list.
    create(value?: PartialMessage<FileRestoreState>): FileRestoreState {
        const message = { fileId: 0n, status: 0, tapeId: 0n, positionId: 0n, pathInTape: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<FileRestoreState>(this, message, value);
        return message;
    }
    // Decodes fields by tag number until the end of the length-delimited region;
    // each path_in_tape entry is appended individually.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FileRestoreState): FileRestoreState {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 file_id */ 1:
                    message.fileId = reader.int64().toBigInt();
                    break;
                case /* copy_status.CopyStatus status */ 2:
                    message.status = reader.int32();
                    break;
                case /* int64 tape_id */ 17:
                    message.tapeId = reader.int64().toBigInt();
                    break;
                case /* int64 position_id */ 18:
                    message.positionId = reader.int64().toBigInt();
                    break;
                case /* repeated string path_in_tape */ 19:
                    message.pathInTape.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes non-default scalars; path_in_tape is emitted as one tagged string per element (unpacked).
    internalBinaryWrite(message: FileRestoreState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 file_id = 1; */
        if (message.fileId !== 0n)
            writer.tag(1, WireType.Varint).int64(message.fileId);
        /* copy_status.CopyStatus status = 2; */
        if (message.status !== 0)
            writer.tag(2, WireType.Varint).int32(message.status);
        /* int64 tape_id = 17; */
        if (message.tapeId !== 0n)
            writer.tag(17, WireType.Varint).int64(message.tapeId);
        /* int64 position_id = 18; */
        if (message.positionId !== 0n)
            writer.tag(18, WireType.Varint).int64(message.positionId);
        /* repeated string path_in_tape = 19; */
        for (let i = 0; i < message.pathInTape.length; i++)
            writer.tag(19, WireType.LengthDelimited).string(message.pathInTape[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.FileRestoreState
 */
export const FileRestoreState = new FileRestoreState$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class JobStateRestore$Type extends MessageType<JobStateRestore> {
    constructor() {
        // step is an enum (JobRestoreStep); files is a repeated FileRestoreState sub-message.
        super("job_restore.JobStateRestore", [
            { no: 1, name: "step", kind: "enum", T: () => ["job_restore.JobRestoreStep", JobRestoreStep] },
            { no: 2, name: "files", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => FileRestoreState }
        ]);
    }
    // Builds an instance with defaults: step = 0 (Pending) and an empty files list.
    create(value?: PartialMessage<JobStateRestore>): JobStateRestore {
        const message = { step: 0, files: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobStateRestore>(this, message, value);
        return message;
    }
    // Decodes fields until the end of the length-delimited region; each files entry is a
    // nested length-prefixed FileRestoreState message appended to the list.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobStateRestore): JobStateRestore {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* job_restore.JobRestoreStep step */ 1:
                    message.step = reader.int32();
                    break;
                case /* repeated job_restore.FileRestoreState files */ 2:
                    message.files.push(FileRestoreState.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes step only when non-default (non-zero); each files element is framed with
    // fork()/join() to produce its length prefix.
    internalBinaryWrite(message: JobStateRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* job_restore.JobRestoreStep step = 1; */
        if (message.step !== 0)
            writer.tag(1, WireType.Varint).int32(message.step);
        /* repeated job_restore.FileRestoreState files = 2; */
        for (let i = 0; i < message.files.length; i++)
            FileRestoreState.internalBinaryWrite(message.files[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobStateRestore
 */
export const JobStateRestore = new JobStateRestore$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated by protobuf-ts — do not hand-edit; regenerate from job_restore.proto instead.
class JobDisplayRestore$Type extends MessageType<JobDisplayRestore> {
    constructor() {
        // int64 fields use LongType.BIGINT (JS bigint); logs (17) is a raw bytes payload.
        super("job_restore.JobDisplayRestore", [
            { no: 1, name: "copyedBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "copyedFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 3, name: "totalBytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 4, name: "totalFiles", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 17, name: "logs", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    // Builds an instance with bigint-zero counters and an empty logs buffer.
    create(value?: PartialMessage<JobDisplayRestore>): JobDisplayRestore {
        const message = { copyedBytes: 0n, copyedFiles: 0n, totalBytes: 0n, totalFiles: 0n, logs: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<JobDisplayRestore>(this, message, value);
        return message;
    }
    // Decodes varint int64 counters and the logs bytes field until the end of the region.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobDisplayRestore): JobDisplayRestore {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 copyedBytes */ 1:
                    message.copyedBytes = reader.int64().toBigInt();
                    break;
                case /* int64 copyedFiles */ 2:
                    message.copyedFiles = reader.int64().toBigInt();
                    break;
                case /* int64 totalBytes */ 3:
                    message.totalBytes = reader.int64().toBigInt();
                    break;
                case /* int64 totalFiles */ 4:
                    message.totalFiles = reader.int64().toBigInt();
                    break;
                case /* bytes logs */ 17:
                    message.logs = reader.bytes();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Writes non-default values only; logs is emitted when non-empty.
    internalBinaryWrite(message: JobDisplayRestore, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 copyedBytes = 1; */
        if (message.copyedBytes !== 0n)
            writer.tag(1, WireType.Varint).int64(message.copyedBytes);
        /* int64 copyedFiles = 2; */
        if (message.copyedFiles !== 0n)
            writer.tag(2, WireType.Varint).int64(message.copyedFiles);
        /* int64 totalBytes = 3; */
        if (message.totalBytes !== 0n)
            writer.tag(3, WireType.Varint).int64(message.totalBytes);
        /* int64 totalFiles = 4; */
        if (message.totalFiles !== 0n)
            writer.tag(4, WireType.Varint).int64(message.totalFiles);
        /* bytes logs = 17; */
        if (message.logs.length)
            writer.tag(17, WireType.LengthDelimited).bytes(message.logs);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message job_restore.JobDisplayRestore
 */
export const JobDisplayRestore = new JobDisplayRestore$Type();

View File

@@ -0,0 +1,157 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "position.proto" (package "position", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
// NOTE(review): generated from position.proto — do not hand-edit; regenerate instead.
// Presumably mirrors the server-side library.Position record (see the Go convertPositions
// helper, which maps the same field set) — verify against the backend entity definitions.
/**
 * @generated from protobuf message position.Position
 */
export interface Position {
    /**
     * @generated from protobuf field: int64 id = 1;
     */
    id: bigint;
    /**
     * @generated from protobuf field: int64 file_id = 2;
     */
    fileId: bigint;
    /**
     * @generated from protobuf field: int64 tape_id = 3;
     */
    tapeId: bigint;
    /**
     * @generated from protobuf field: string path = 4;
     */
    path: string;
    /**
     * @generated from protobuf field: int64 mode = 17;
     */
    mode: bigint;
    /**
     * @generated from protobuf field: int64 mod_time = 18;
     */
    modTime: bigint;
    /**
     * @generated from protobuf field: int64 write_time = 19;
     */
    writeTime: bigint;
    /**
     * @generated from protobuf field: int64 size = 20;
     */
    size: bigint;
    /**
     * @generated from protobuf field: bytes hash = 21;
     */
    hash: Uint8Array;
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated codec for position.Position — regenerate
// from position.proto rather than hand-editing; field numbers here must stay
// in sync with the .proto.
class Position$Type extends MessageType<Position> {
    constructor() {
        super("position.Position", [
            { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "file_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 3, name: "tape_id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 4, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 19, name: "write_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 20, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 21, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    // Builds a Position with proto3 defaults, optionally merging a partial value.
    create(value?: PartialMessage<Position>): Position {
        const message = { id: 0n, fileId: 0n, tapeId: 0n, path: "", mode: 0n, modTime: 0n, writeTime: 0n, size: 0n, hash: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Position>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Position): Position {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 id */ 1:
                    message.id = reader.int64().toBigInt();
                    break;
                case /* int64 file_id */ 2:
                    message.fileId = reader.int64().toBigInt();
                    break;
                case /* int64 tape_id */ 3:
                    message.tapeId = reader.int64().toBigInt();
                    break;
                case /* string path */ 4:
                    message.path = reader.string();
                    break;
                case /* int64 mode */ 17:
                    message.mode = reader.int64().toBigInt();
                    break;
                case /* int64 mod_time */ 18:
                    message.modTime = reader.int64().toBigInt();
                    break;
                case /* int64 write_time */ 19:
                    message.writeTime = reader.int64().toBigInt();
                    break;
                case /* int64 size */ 20:
                    message.size = reader.int64().toBigInt();
                    break;
                case /* bytes hash */ 21:
                    message.hash = reader.bytes();
                    break;
                default:
                    // Unknown fields are skipped, thrown, or forwarded per options.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; proto3 default values are omitted from the wire.
    internalBinaryWrite(message: Position, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 id = 1; */
        if (message.id !== 0n)
            writer.tag(1, WireType.Varint).int64(message.id);
        /* int64 file_id = 2; */
        if (message.fileId !== 0n)
            writer.tag(2, WireType.Varint).int64(message.fileId);
        /* int64 tape_id = 3; */
        if (message.tapeId !== 0n)
            writer.tag(3, WireType.Varint).int64(message.tapeId);
        /* string path = 4; */
        if (message.path !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.path);
        /* int64 mode = 17; */
        if (message.mode !== 0n)
            writer.tag(17, WireType.Varint).int64(message.mode);
        /* int64 mod_time = 18; */
        if (message.modTime !== 0n)
            writer.tag(18, WireType.Varint).int64(message.modTime);
        /* int64 write_time = 19; */
        if (message.writeTime !== 0n)
            writer.tag(19, WireType.Varint).int64(message.writeTime);
        /* int64 size = 20; */
        if (message.size !== 0n)
            writer.tag(20, WireType.Varint).int64(message.size);
        /* bytes hash = 21; */
        if (message.hash.length)
            writer.tag(21, WireType.LengthDelimited).bytes(message.hash);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message position.Position
 *
 * Runtime singleton used to create/read/write Position messages.
 */
export const Position = new Position$Type();

View File

@@ -0,0 +1,193 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "service.proto" (package "service", syntax proto3)
// tslint:disable
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
import { Service } from "./service";
import type { DeviceListReply } from "./service";
import type { DeviceListRequest } from "./service";
import type { SourceListReply } from "./service";
import type { SourceListRequest } from "./service";
import type { JobGetLogReply } from "./service";
import type { JobGetLogRequest } from "./service";
import type { JobDisplayReply } from "./service";
import type { JobDisplayRequest } from "./service";
import type { JobNextReply } from "./service";
import type { JobNextRequest } from "./service";
import type { JobCreateReply } from "./service";
import type { JobCreateRequest } from "./service";
import type { JobListReply } from "./service";
import type { JobListRequest } from "./service";
import type { TapeMGetReply } from "./service";
import type { TapeMGetRequest } from "./service";
import type { FileListParentsReply } from "./service";
import type { FileListParentsRequest } from "./service";
import type { FileDeleteReply } from "./service";
import type { FileDeleteRequest } from "./service";
import type { FileMkdirReply } from "./service";
import type { FileMkdirRequest } from "./service";
import type { FileEditReply } from "./service";
import type { FileEditRequest } from "./service";
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
import type { FileGetReply } from "./service";
import type { FileGetRequest } from "./service";
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
/**
 * @generated from protobuf service service.Service
 *
 * Client-side contract for the tapewriter gRPC service: file-tree CRUD,
 * tape lookup, job lifecycle, and source/device enumeration. All calls are
 * unary request/response.
 */
export interface IServiceClient {
    /**
     * @generated from protobuf rpc: FileGet(service.FileGetRequest) returns (service.FileGetReply);
     */
    fileGet(input: FileGetRequest, options?: RpcOptions): UnaryCall<FileGetRequest, FileGetReply>;
    /**
     * @generated from protobuf rpc: FileEdit(service.FileEditRequest) returns (service.FileEditReply);
     */
    fileEdit(input: FileEditRequest, options?: RpcOptions): UnaryCall<FileEditRequest, FileEditReply>;
    /**
     * @generated from protobuf rpc: FileMkdir(service.FileMkdirRequest) returns (service.FileMkdirReply);
     */
    fileMkdir(input: FileMkdirRequest, options?: RpcOptions): UnaryCall<FileMkdirRequest, FileMkdirReply>;
    /**
     * @generated from protobuf rpc: FileDelete(service.FileDeleteRequest) returns (service.FileDeleteReply);
     */
    fileDelete(input: FileDeleteRequest, options?: RpcOptions): UnaryCall<FileDeleteRequest, FileDeleteReply>;
    /**
     * @generated from protobuf rpc: FileListParents(service.FileListParentsRequest) returns (service.FileListParentsReply);
     */
    fileListParents(input: FileListParentsRequest, options?: RpcOptions): UnaryCall<FileListParentsRequest, FileListParentsReply>;
    /**
     * @generated from protobuf rpc: TapeMGet(service.TapeMGetRequest) returns (service.TapeMGetReply);
     */
    tapeMGet(input: TapeMGetRequest, options?: RpcOptions): UnaryCall<TapeMGetRequest, TapeMGetReply>;
    /**
     * @generated from protobuf rpc: JobList(service.JobListRequest) returns (service.JobListReply);
     */
    jobList(input: JobListRequest, options?: RpcOptions): UnaryCall<JobListRequest, JobListReply>;
    /**
     * @generated from protobuf rpc: JobCreate(service.JobCreateRequest) returns (service.JobCreateReply);
     */
    jobCreate(input: JobCreateRequest, options?: RpcOptions): UnaryCall<JobCreateRequest, JobCreateReply>;
    /**
     * @generated from protobuf rpc: JobNext(service.JobNextRequest) returns (service.JobNextReply);
     */
    jobNext(input: JobNextRequest, options?: RpcOptions): UnaryCall<JobNextRequest, JobNextReply>;
    /**
     * @generated from protobuf rpc: JobDisplay(service.JobDisplayRequest) returns (service.JobDisplayReply);
     */
    jobDisplay(input: JobDisplayRequest, options?: RpcOptions): UnaryCall<JobDisplayRequest, JobDisplayReply>;
    /**
     * @generated from protobuf rpc: JobGetLog(service.JobGetLogRequest) returns (service.JobGetLogReply);
     */
    jobGetLog(input: JobGetLogRequest, options?: RpcOptions): UnaryCall<JobGetLogRequest, JobGetLogReply>;
    /**
     * @generated from protobuf rpc: SourceList(service.SourceListRequest) returns (service.SourceListReply);
     */
    sourceList(input: SourceListRequest, options?: RpcOptions): UnaryCall<SourceListRequest, SourceListReply>;
    /**
     * @generated from protobuf rpc: DeviceList(service.DeviceListRequest) returns (service.DeviceListReply);
     */
    deviceList(input: DeviceListRequest, options?: RpcOptions): UnaryCall<DeviceListRequest, DeviceListReply>;
}
/**
 * @generated from protobuf service service.Service
 *
 * Generated unary-RPC client. Each method resolves its descriptor by
 * POSITIONAL index into `Service.methods`, so the indices below must match
 * the RPC declaration order in service.proto — regenerate instead of editing.
 */
export class ServiceClient implements IServiceClient, ServiceInfo {
    typeName = Service.typeName;
    methods = Service.methods;
    options = Service.options;
    // The transport (e.g. grpc-web) is injected; options are merged per call.
    constructor(private readonly _transport: RpcTransport) {
    }
    /**
     * @generated from protobuf rpc: FileGet(service.FileGetRequest) returns (service.FileGetReply);
     */
    fileGet(input: FileGetRequest, options?: RpcOptions): UnaryCall<FileGetRequest, FileGetReply> {
        const method = this.methods[0], opt = this._transport.mergeOptions(options);
        return stackIntercept<FileGetRequest, FileGetReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: FileEdit(service.FileEditRequest) returns (service.FileEditReply);
     */
    fileEdit(input: FileEditRequest, options?: RpcOptions): UnaryCall<FileEditRequest, FileEditReply> {
        const method = this.methods[1], opt = this._transport.mergeOptions(options);
        return stackIntercept<FileEditRequest, FileEditReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: FileMkdir(service.FileMkdirRequest) returns (service.FileMkdirReply);
     */
    fileMkdir(input: FileMkdirRequest, options?: RpcOptions): UnaryCall<FileMkdirRequest, FileMkdirReply> {
        const method = this.methods[2], opt = this._transport.mergeOptions(options);
        return stackIntercept<FileMkdirRequest, FileMkdirReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: FileDelete(service.FileDeleteRequest) returns (service.FileDeleteReply);
     */
    fileDelete(input: FileDeleteRequest, options?: RpcOptions): UnaryCall<FileDeleteRequest, FileDeleteReply> {
        const method = this.methods[3], opt = this._transport.mergeOptions(options);
        return stackIntercept<FileDeleteRequest, FileDeleteReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: FileListParents(service.FileListParentsRequest) returns (service.FileListParentsReply);
     */
    fileListParents(input: FileListParentsRequest, options?: RpcOptions): UnaryCall<FileListParentsRequest, FileListParentsReply> {
        const method = this.methods[4], opt = this._transport.mergeOptions(options);
        return stackIntercept<FileListParentsRequest, FileListParentsReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: TapeMGet(service.TapeMGetRequest) returns (service.TapeMGetReply);
     */
    tapeMGet(input: TapeMGetRequest, options?: RpcOptions): UnaryCall<TapeMGetRequest, TapeMGetReply> {
        const method = this.methods[5], opt = this._transport.mergeOptions(options);
        return stackIntercept<TapeMGetRequest, TapeMGetReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: JobList(service.JobListRequest) returns (service.JobListReply);
     */
    jobList(input: JobListRequest, options?: RpcOptions): UnaryCall<JobListRequest, JobListReply> {
        const method = this.methods[6], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobListRequest, JobListReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: JobCreate(service.JobCreateRequest) returns (service.JobCreateReply);
     */
    jobCreate(input: JobCreateRequest, options?: RpcOptions): UnaryCall<JobCreateRequest, JobCreateReply> {
        const method = this.methods[7], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobCreateRequest, JobCreateReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: JobNext(service.JobNextRequest) returns (service.JobNextReply);
     */
    jobNext(input: JobNextRequest, options?: RpcOptions): UnaryCall<JobNextRequest, JobNextReply> {
        const method = this.methods[8], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobNextRequest, JobNextReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: JobDisplay(service.JobDisplayRequest) returns (service.JobDisplayReply);
     */
    jobDisplay(input: JobDisplayRequest, options?: RpcOptions): UnaryCall<JobDisplayRequest, JobDisplayReply> {
        const method = this.methods[9], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobDisplayRequest, JobDisplayReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: JobGetLog(service.JobGetLogRequest) returns (service.JobGetLogReply);
     */
    jobGetLog(input: JobGetLogRequest, options?: RpcOptions): UnaryCall<JobGetLogRequest, JobGetLogReply> {
        const method = this.methods[10], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobGetLogRequest, JobGetLogReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: SourceList(service.SourceListRequest) returns (service.SourceListReply);
     */
    sourceList(input: SourceListRequest, options?: RpcOptions): UnaryCall<SourceListRequest, SourceListReply> {
        const method = this.methods[11], opt = this._transport.mergeOptions(options);
        return stackIntercept<SourceListRequest, SourceListReply>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: DeviceList(service.DeviceListRequest) returns (service.DeviceListReply);
     */
    deviceList(input: DeviceListRequest, options?: RpcOptions): UnaryCall<DeviceListRequest, DeviceListReply> {
        const method = this.methods[12], opt = this._transport.mergeOptions(options);
        return stackIntercept<DeviceListRequest, DeviceListReply>("unary", this._transport, method, opt, input);
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,281 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "source.proto" (package "source", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CopyStatus } from "./copy_status";
/**
 * @generated from protobuf message source.SourceFile
 *
 * A file on the source (host) filesystem, as listed for job creation.
 * int64 fields are surfaced as `bigint`.
 */
export interface SourceFile {
    /**
     * @generated from protobuf field: string path = 1;
     */
    path: string;
    /**
     * @generated from protobuf field: string parent_path = 2;
     */
    parentPath: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
    /**
     * @generated from protobuf field: int64 mode = 17;
     */
    mode: bigint;
    /**
     * @generated from protobuf field: int64 mod_time = 18;
     *
     * NOTE(review): presumably unix seconds like the other *_time fields —
     * confirm against the backend converters.
     */
    modTime: bigint;
    /**
     * @generated from protobuf field: int64 size = 19;
     */
    size: bigint;
}
/**
 * @generated from protobuf message source.Source
 *
 * A selection of paths under a common base directory.
 */
export interface Source {
    /**
     * @generated from protobuf field: string base = 1;
     */
    base: string;
    /**
     * @generated from protobuf field: repeated string path = 2;
     */
    path: string[];
}
/**
 * @generated from protobuf message source.SourceState
 *
 * Copy progress/status for a single Source. `source` and `message` are
 * optional on the wire and may be undefined.
 */
export interface SourceState {
    /**
     * @generated from protobuf field: source.Source source = 1;
     */
    source?: Source;
    /**
     * @generated from protobuf field: int64 size = 2;
     */
    size: bigint;
    /**
     * @generated from protobuf field: copy_status.CopyStatus status = 3;
     */
    status: CopyStatus;
    /**
     * @generated from protobuf field: optional string message = 4;
     */
    message?: string;
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated codec for source.SourceFile — regenerate
// from source.proto rather than hand-editing.
class SourceFile$Type extends MessageType<SourceFile> {
    constructor() {
        super("source.SourceFile", [
            { no: 1, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "parent_path", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 17, name: "mode", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 18, name: "mod_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 19, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    // Builds a SourceFile with proto3 defaults, optionally merging a partial value.
    create(value?: PartialMessage<SourceFile>): SourceFile {
        const message = { path: "", parentPath: "", name: "", mode: 0n, modTime: 0n, size: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SourceFile>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceFile): SourceFile {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string path */ 1:
                    message.path = reader.string();
                    break;
                case /* string parent_path */ 2:
                    message.parentPath = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                case /* int64 mode */ 17:
                    message.mode = reader.int64().toBigInt();
                    break;
                case /* int64 mod_time */ 18:
                    message.modTime = reader.int64().toBigInt();
                    break;
                case /* int64 size */ 19:
                    message.size = reader.int64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; proto3 default values are omitted from the wire.
    internalBinaryWrite(message: SourceFile, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string path = 1; */
        if (message.path !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.path);
        /* string parent_path = 2; */
        if (message.parentPath !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.parentPath);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        /* int64 mode = 17; */
        if (message.mode !== 0n)
            writer.tag(17, WireType.Varint).int64(message.mode);
        /* int64 mod_time = 18; */
        if (message.modTime !== 0n)
            writer.tag(18, WireType.Varint).int64(message.modTime);
        /* int64 size = 19; */
        if (message.size !== 0n)
            writer.tag(19, WireType.Varint).int64(message.size);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message source.SourceFile
 *
 * Runtime singleton used to create/read/write SourceFile messages.
 */
export const SourceFile = new SourceFile$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated codec for source.Source — regenerate
// from source.proto rather than hand-editing.
class Source$Type extends MessageType<Source> {
    constructor() {
        super("source.Source", [
            { no: 1, name: "base", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "path", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a Source with proto3 defaults, optionally merging a partial value.
    create(value?: PartialMessage<Source>): Source {
        const message = { base: "", path: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Source>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Source): Source {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string base */ 1:
                    message.base = reader.string();
                    break;
                case /* repeated string path */ 2:
                    // Repeated strings arrive as one tag per element (unpacked).
                    message.path.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; proto3 default values are omitted from the wire.
    internalBinaryWrite(message: Source, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string base = 1; */
        if (message.base !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.base);
        /* repeated string path = 2; */
        for (let i = 0; i < message.path.length; i++)
            writer.tag(2, WireType.LengthDelimited).string(message.path[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message source.Source
 *
 * Runtime singleton used to create/read/write Source messages.
 */
export const Source = new Source$Type();
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated codec for source.SourceState — regenerate
// from source.proto rather than hand-editing.
class SourceState$Type extends MessageType<SourceState> {
    constructor() {
        super("source.SourceState", [
            { no: 1, name: "source", kind: "message", T: () => Source },
            { no: 2, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 3, name: "status", kind: "enum", T: () => ["copy_status.CopyStatus", CopyStatus] },
            { no: 4, name: "message", kind: "scalar", opt: true, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a SourceState with proto3 defaults; optional fields (`source`,
    // `message`) are left undefined.
    create(value?: PartialMessage<SourceState>): SourceState {
        const message = { size: 0n, status: 0 };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SourceState>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SourceState): SourceState {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* source.Source source */ 1:
                    // Nested message: merge into any previously-read `source` value.
                    message.source = Source.internalBinaryRead(reader, reader.uint32(), options, message.source);
                    break;
                case /* int64 size */ 2:
                    message.size = reader.int64().toBigInt();
                    break;
                case /* copy_status.CopyStatus status */ 3:
                    message.status = reader.int32();
                    break;
                case /* optional string message */ 4:
                    message.message = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; proto3 default values are omitted from the wire.
    internalBinaryWrite(message: SourceState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* source.Source source = 1; */
        if (message.source)
            Source.internalBinaryWrite(message.source, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        /* int64 size = 2; */
        if (message.size !== 0n)
            writer.tag(2, WireType.Varint).int64(message.size);
        /* copy_status.CopyStatus status = 3; */
        if (message.status !== 0)
            writer.tag(3, WireType.Varint).int32(message.status);
        /* optional string message = 4; */
        if (message.message !== undefined)
            writer.tag(4, WireType.LengthDelimited).string(message.message);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message source.SourceState
 *
 * Runtime singleton used to create/read/write SourceState messages.
 */
export const SourceState = new SourceState$Type();

146
frontend/src/entity/tape.ts Normal file
View File

@@ -0,0 +1,146 @@
// @generated by protobuf-ts 2.8.2
// @generated from protobuf file "tape.proto" (package "tape", syntax proto3)
// tslint:disable
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
 * @generated from protobuf message tape.Tape
 *
 * A physical tape cartridge known to the library. int64 fields are surfaced
 * as `bigint`.
 */
export interface Tape {
    /**
     * @generated from protobuf field: int64 id = 1;
     */
    id: bigint;
    /**
     * @generated from protobuf field: string barcode = 2;
     */
    barcode: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
    /**
     * @generated from protobuf field: string encryption = 4;
     */
    encryption: string;
    /**
     * @generated from protobuf field: int64 create_time = 17;
     *
     * NOTE(review): presumably unix seconds like the other *_time fields —
     * confirm against the backend converters.
     */
    createTime: bigint;
    /**
     * @generated from protobuf field: optional int64 destroy_time = 18;
     *
     * Undefined while the tape is still alive.
     */
    destroyTime?: bigint;
    /**
     * @generated from protobuf field: int64 capacity_bytes = 19;
     */
    capacityBytes: bigint;
    /**
     * @generated from protobuf field: int64 writen_bytes = 20;
     *
     * NOTE(review): "writen" (sic) comes from the .proto field name — renaming
     * it here would break the generated mapping; fix in the .proto if desired.
     */
    writenBytes: bigint;
}
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated codec for tape.Tape — regenerate from
// tape.proto rather than hand-editing.
class Tape$Type extends MessageType<Tape> {
    constructor() {
        super("tape.Tape", [
            { no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "barcode", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "encryption", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 17, name: "create_time", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 18, name: "destroy_time", kind: "scalar", opt: true, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 19, name: "capacity_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 20, name: "writen_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    // Builds a Tape with proto3 defaults; optional `destroyTime` stays undefined.
    create(value?: PartialMessage<Tape>): Tape {
        const message = { id: 0n, barcode: "", name: "", encryption: "", createTime: 0n, capacityBytes: 0n, writenBytes: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Tape>(this, message, value);
        return message;
    }
    // Decodes `length` bytes from the reader into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Tape): Tape {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 id */ 1:
                    message.id = reader.int64().toBigInt();
                    break;
                case /* string barcode */ 2:
                    message.barcode = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                case /* string encryption */ 4:
                    message.encryption = reader.string();
                    break;
                case /* int64 create_time */ 17:
                    message.createTime = reader.int64().toBigInt();
                    break;
                case /* optional int64 destroy_time */ 18:
                    message.destroyTime = reader.int64().toBigInt();
                    break;
                case /* int64 capacity_bytes */ 19:
                    message.capacityBytes = reader.int64().toBigInt();
                    break;
                case /* int64 writen_bytes */ 20:
                    message.writenBytes = reader.int64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Serializes the message; proto3 default values are omitted from the wire.
    internalBinaryWrite(message: Tape, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 id = 1; */
        if (message.id !== 0n)
            writer.tag(1, WireType.Varint).int64(message.id);
        /* string barcode = 2; */
        if (message.barcode !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.barcode);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        /* string encryption = 4; */
        if (message.encryption !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.encryption);
        /* int64 create_time = 17; */
        if (message.createTime !== 0n)
            writer.tag(17, WireType.Varint).int64(message.createTime);
        /* optional int64 destroy_time = 18; */
        // Optional field: presence (not value) decides emission.
        if (message.destroyTime !== undefined)
            writer.tag(18, WireType.Varint).int64(message.destroyTime);
        /* int64 capacity_bytes = 19; */
        if (message.capacityBytes !== 0n)
            writer.tag(19, WireType.Varint).int64(message.capacityBytes);
        /* int64 writen_bytes = 20; */
        if (message.writenBytes !== 0n)
            writer.tag(20, WireType.Varint).int64(message.writenBytes);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message tape.Tape
 *
 * Runtime singleton used to create/read/write Tape messages.
 */
export const Tape = new Tape$Type();

184
frontend/src/file.tsx Normal file
View File

@@ -0,0 +1,184 @@
import { useState, useRef, useEffect, useMemo, useCallback } from "react";
import Grid from "@mui/material/Grid";
import Box from "@mui/material/Box";
import { FullFileBrowser, FileBrowserHandle, FileArray } from "chonky";
import { ChonkyActions, ChonkyFileActionData } from "chonky";
import "./app.less";
import { cli, convertFiles } from "./api";
import { Root } from "./api";
import { RenameFileAction, RefreshListAction } from "./actions";
import { useDetailModal, DetailModal } from "./detail";
import { FileGetReply } from "./entity";
const useDualSide = () => {
const left = useRef<FileBrowserHandle>(null);
const right = useRef<FileBrowserHandle>(null);
const instances = { left, right };
const refreshAll = useCallback(async () => {
await Promise.all(
Object.values(instances).map((ref) => {
if (!ref || !ref.current) {
return;
}
return ref.current.requestFileAction(RefreshListAction, {});
})
);
}, [instances]);
return { instances, refreshAll };
};
/**
 * File-browser state and action handling backed by the tapewriter file APIs.
 *
 * @param refreshAll      refreshes the listing in every mounted browser pane.
 * @param openDetailModel opens the detail modal for a fetched file.
 * @returns props for chonky's FullFileBrowser (files, folderChain, actions, …).
 *
 * Fixes: renamed local setter `setFolderChan` -> `setFolderChain` (typo);
 * mount effect no longer leaks `openFolder`'s return value out of `useEffect`
 * and declares its dependency (safe: `openFolder` is identity-stable).
 */
const useFileBrowser = (refreshAll: () => Promise<void>, openDetailModel: (detail: FileGetReply) => void) => {
    // Listing of the currently-open folder; a single `null` renders as loading.
    const [files, setFiles] = useState<FileArray>(Array(1).fill(null));
    // Breadcrumb chain from the virtual root down to the open folder.
    const [folderChain, setFolderChain] = useState<FileArray>([Root]);
    // ID of the currently-open folder: last breadcrumb entry, "0" as fallback.
    const currentID = useMemo(() => {
        if (folderChain.length === 0) {
            return "0";
        }
        const last = folderChain.slice(-1)[0];
        if (!last) {
            return "0";
        }
        return last.id;
    }, [folderChain]);
    // Fetches a folder's children and its parent chain in parallel, then
    // swaps both pieces of state.
    const openFolder = useCallback((id: string) => {
        (async () => {
            const [file, parents] = await Promise.all([cli.fileGet({ id: BigInt(id) }).response, cli.fileListParents({ id: BigInt(id) }).response]);
            setFiles(convertFiles(file.children));
            setFolderChain([Root, ...convertFiles(parents.parents)]);
        })();
    }, []);
    // Open the root folder once on mount. `openFolder` never changes identity
    // (empty dep list above), so listing it here cannot cause re-runs.
    useEffect(() => {
        openFolder(Root.id);
    }, [openFolder]);
    const onFileAction = useCallback(
        (data: ChonkyFileActionData) => {
            switch (data.id) {
                case ChonkyActions.OpenFiles.id:
                    // Double-click: descend into folders, open the detail modal for files.
                    (async () => {
                        const { targetFile, files } = data.payload;
                        const fileToOpen = targetFile ?? files[0];
                        if (!fileToOpen) {
                            return;
                        }
                        if (fileToOpen.isDir) {
                            await openFolder(fileToOpen.id);
                            return;
                        }
                        const file = await cli.fileGet({ id: BigInt(fileToOpen.id) }).response;
                        await openDetailModel(file);
                    })();
                    return;
                case ChonkyActions.MoveFiles.id:
                    // Drag-and-drop move: re-parent each file, then refresh both panes.
                    (async () => {
                        const { destination, files } = data.payload;
                        for (const file of files) {
                            await cli.fileEdit({
                                id: BigInt(file.id),
                                file: { parentId: BigInt(destination.id) },
                            }).response;
                        }
                        await refreshAll();
                    })();
                    return;
                case RenameFileAction.id:
                    // Renames only the first selected file, via a blocking prompt.
                    (async () => {
                        const files = data.state.selectedFilesForAction;
                        if (files.length === 0) {
                            return;
                        }
                        const file = files[0];
                        const name = prompt("Provide new name for this file:", file.name);
                        if (!name) {
                            return;
                        }
                        await cli.fileEdit({ id: BigInt(file.id), file: { name } }).response;
                        await refreshAll();
                    })();
                    return;
                case ChonkyActions.CreateFolder.id:
                    (async () => {
                        const name = prompt("Provide the name for your new folder:");
                        if (!name) {
                            return;
                        }
                        await cli.fileMkdir({ parentId: BigInt(currentID), path: name }).response;
                        await refreshAll();
                    })();
                    return;
                case ChonkyActions.DeleteFiles.id:
                    (async () => {
                        const files = data.state.selectedFilesForAction;
                        const fileids = files.map((file) => BigInt(file.id));
                        await cli.fileDelete({ ids: fileids }).response;
                        await refreshAll();
                    })();
                    return;
                case RefreshListAction.id:
                    openFolder(currentID);
                    return;
            }
        },
        [openFolder, openDetailModel, refreshAll, currentID]
    );
    const fileActions = useMemo(() => [ChonkyActions.CreateFolder, ChonkyActions.DeleteFiles, ChonkyActions.MoveFiles, RenameFileAction, RefreshListAction], []);
    return {
        files,
        folderChain,
        onFileAction,
        fileActions,
        defaultFileViewActionId: ChonkyActions.EnableListView.id,
        doubleClickDelay: 300,
    };
};
export const FileBrowserType = "file";

// Two side-by-side file browser panes sharing one detail modal. Both panes are
// refreshed together on mutations and re-polled every 10 seconds.
export const FileBrowser = () => {
  const { instances, refreshAll } = useDualSide();
  const { detail, openDetailModel, closeDetailModel } = useDetailModal();
  const leftProps = useFileBrowser(refreshAll, openDetailModel);
  const rightProps = useFileBrowser(refreshAll, openDetailModel);
  useEffect(() => {
    // Run once on mount: toggle hidden files on in both panes, then poll.
    // `.forEach` replaces `.map` since only the side effects matter here.
    Object.values(instances).forEach((inst) => inst.current?.requestFileAction(ChonkyActions.ToggleHiddenFiles, {}));
    const interval = setInterval(() => {
      Object.values(instances).forEach((inst) => inst.current?.requestFileAction(RefreshListAction, {}));
    }, 10000);
    return () => clearInterval(interval);
  }, []);
  return (
    <Box className="browser-box">
      <Grid className="browser-container" container>
        <Grid className="browser" item xs={6}>
          <FullFileBrowser instanceId="left" ref={instances.left} {...leftProps} />
        </Grid>
        <Grid className="browser" item xs={6}>
          <FullFileBrowser instanceId="right" ref={instances.right} {...rightProps} />
        </Grid>
      </Grid>
      <DetailModal detail={detail} onClose={closeDetailModel} />
    </Box>
  );
};

41
frontend/src/index.css Normal file
View File

@@ -0,0 +1,41 @@
/* Make the document fill the viewport with no top-level scrolling. */
:root {
  padding: 0;
  margin: 0;
  height: 100%;
  width: 100%;
  overflow: hidden;
}
/* Full-bleed white body with standard font-smoothing tweaks. */
body {
  padding: 0;
  margin: 0;
  height: 100%;
  width: 100%;
  background-color: #ffffff;
  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  -webkit-text-size-adjust: 100%;
}
/* React mount point stretches to the full viewport. */
#root {
  height: 100%;
  width: 100%;
  margin: 0;
  padding: 0;
  text-align: center;
}
/* Disabled alternative theme block kept for reference. */
/*
:root {
  font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
  font-size: 16px;
  line-height: 24px;
  font-weight: 400;
  color-scheme: light dark;
  color: rgba(255, 255, 255, 0.87);
} */

31
frontend/src/init.tsx Normal file
View File

@@ -0,0 +1,31 @@
import { setChonkyDefaults } from "chonky";
import { ChonkyIconFA } from "chonky-icon-fontawesome";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faPencilAlt } from "@fortawesome/free-solid-svg-icons/faPencilAlt";
// Hoisted to the top: the original placed this import after executable
// statements, which obscures that module imports are evaluated first anyway.
import { unstable_ClassNameGenerator as ClassNameGenerator } from "@mui/material/className";

// Icons not shipped with Chonky, addressable by name from custom actions.
const ExternalIcons: Record<string, any> = {
  edit: faPencilAlt,
};

// Route known external icon names to FontAwesome; everything else falls back
// to Chonky's bundled icon set.
setChonkyDefaults({
  iconComponent: (props) => {
    const icon = ExternalIcons[props.icon] as any;
    if (icon) {
      const faProps = {
        ...props,
        icon: icon,
      } as const;
      return <FontAwesomeIcon {...faProps} />;
    }
    return <ChonkyIconFA {...props} />;
  },
});

// Prefix MUI-generated class names to avoid collisions with other libraries.
ClassNameGenerator.configure(
  (componentName: string) => `app-${componentName}`
);

471
frontend/src/jobs.tsx Normal file
View File

@@ -0,0 +1,471 @@
import { Fragment, ChangeEvent } from "react";
import { useState, useRef, useEffect, useMemo, useCallback, FC } from "react";
import { assert } from "@protobuf-ts/runtime";
import Grid from "@mui/material/Grid";
import Box from "@mui/material/Box";
import List from "@mui/material/List";
import ListItemButton from "@mui/material/ListItemButton";
import ListItemText from "@mui/material/ListItemText";
import Typography from "@mui/material/Typography";
import Card from "@mui/material/Card";
import CardActions from "@mui/material/CardActions";
import CardContent from "@mui/material/CardContent";
import Button from "@mui/material/Button";
import TextField from "@mui/material/TextField";
import MenuItem from "@mui/material/MenuItem";
import Dialog from "@mui/material/Dialog";
import DialogActions from "@mui/material/DialogActions";
import DialogContent from "@mui/material/DialogContent";
import DialogContentText from "@mui/material/DialogContentText";
import DialogTitle from "@mui/material/DialogTitle";
import LinearProgress from "@mui/material/LinearProgress";
import Divider from "@mui/material/Divider";
import "./app.less";
import { cli, sleep } from "./api";
import { Job, JobDisplay, JobCreateRequest, JobListRequest, JobNextRequest, JobStatus, CopyStatus } from "./entity";
import { JobArchiveCopyingParam, JobArchiveStep, JobDisplayArchive, JobParamArchive, JobStateArchive } from "./entity";
import { SourceState } from "./entity";
import { formatFilesize } from "./tools";
export const JobsType = "jobs";

// A job merged with its (optional) live display payload.
type DisplayableJob = Job & Partial<JobDisplay>;

// Job list page: a sidebar with the "new job" action and a card per job,
// refreshed every two seconds.
export const JobsBrowser = () => {
  const [jobs, setJobs] = useState<DisplayableJob[]>([]);
  // Fetch the job list, then every job's display info, and merge pairwise.
  const refresh = useCallback(async () => {
    const listReply = await cli.jobList(JobListRequest.create({ param: { oneofKind: "list", list: {} } })).response;
    const displays = await Promise.all(listReply.jobs.map((job) => cli.jobDisplay({ id: job.id }).response));
    const merged = listReply.jobs.map((job, idx) => ({ ...job, ...displays[idx].display }));
    console.log("refresh jobs list, ", merged);
    setJobs(merged);
  }, [setJobs]);
  useEffect(() => {
    refresh();
    const timer = setInterval(refresh, 2000);
    return () => {
      clearInterval(timer);
    };
  }, []);
  return (
    <Box className="browser-box">
      <Grid className="browser-container" container>
        <Grid className="browser" item xs={2}>
          <List
            sx={{
              width: "100%",
              height: "100%",
              bgcolor: "background.paper",
              boxSizing: "border-box",
            }}
            component="nav"
          >
            <NewArchiveDialog refresh={refresh} />
          </List>
        </Grid>
        <Grid className="browser" item xs={10}>
          {jobs.map((job) => (
            <GetJobCard job={job} key={job.id.toString()} refresh={refresh} />
          ))}
        </Grid>
      </Grid>
    </Box>
  );
};
// Pick a specialized card for the job's state type; anything without a known
// specialization renders as the generic card.
const GetJobCard = ({ job, refresh }: { job: DisplayableJob; refresh: () => Promise<void> }): JSX.Element => {
  const state = job.state;
  if (state && state.state.oneofKind === "archive") {
    const display = job.display?.oneofKind === "archive" ? job.display.archive : null;
    return <ArchiveCard job={job} refresh={refresh} state={state.state.archive} display={display} />;
  }
  return <JobCard job={job} />;
};
// Snapshot of the copied-bytes counter at a wall-clock instant, used to
// compute a transfer speed between successive display updates.
type ArchiveLastDisplay = { copyedBytes: bigint; lastUpdate: number };
// Card for an archive job: progress bar, live stats, and step-specific
// actions (load tape, view log, view files).
const ArchiveCard = ({
  job,
  state,
  display,
  refresh,
}: {
  job: Job;
  state: JobStateArchive;
  display: JobDisplayArchive | null;
  refresh: () => Promise<void>;
}): JSX.Element => {
  const [lastDisplay, setLastDisplay] = useState<ArchiveLastDisplay | null>(null);
  // Bytes/second between the last two display snapshots; NaN while unknown.
  const [speed, setSpeed] = useState<number>(NaN);
  // Fold a new snapshot into the speed estimate. A null snapshot (job no
  // longer reporting) resets the speed to NaN.
  const diffDisplay = useCallback(
    (current: ArchiveLastDisplay | null) => {
      if (!lastDisplay) {
        // First sample: nothing to diff against yet.
        setLastDisplay(current);
        return;
      }
      if (!current) {
        setLastDisplay(current);
        setSpeed(NaN);
        return;
      }
      const duration = current.lastUpdate - lastDisplay.lastUpdate;
      if (duration) {
        // duration is in ms; scale to bytes per second.
        const speed = ((Number(current.copyedBytes) - Number(lastDisplay.copyedBytes)) / duration) * 1000;
        setSpeed(speed);
      }
      setLastDisplay(current);
    },
    [lastDisplay, setLastDisplay, setSpeed]
  );
  // Take a timestamped snapshot every time the display payload changes.
  useEffect(() => {
    if (!display) {
      diffDisplay(null);
      return;
    }
    diffDisplay({ copyedBytes: display.copyedBytes, lastUpdate: Date.now() });
  }, [display]);
  // Aggregate totals over the job's sources plus the live display counters.
  // "Submited" files are already committed; the display only covers the
  // in-flight remainder, so the two are summed.
  const [fields, progress] = useMemo(() => {
    const totalFiles = state.sources.length;
    let submitedFiles = 0,
      submitedBytes = 0,
      totalBytes = 0;
    for (const file of state.sources) {
      totalBytes += Number(file.size);
      if (file.status !== CopyStatus.Submited) {
        continue;
      }
      submitedFiles++;
      submitedBytes += Number(file.size);
    }
    const copyedFiles = submitedFiles + Number(display?.copyedFiles || 0n);
    const copyedBytes = submitedBytes + Number(display?.copyedBytes || 0n);
    // Empty jobs count as 100% complete.
    const progress = (totalBytes > 0 ? copyedBytes / totalBytes : 1) * 100;
    const fields = [
      { name: "Current Step", value: JobArchiveStep[state.step] },
      { name: "Current Speed", value: !Number.isNaN(speed) ? `${formatFilesize(speed)}/s` : "--" },
      { name: "Total Files", value: totalFiles },
      { name: "Total Bytes", value: formatFilesize(totalBytes) },
      { name: "Submited Files", value: submitedFiles },
      { name: "Submited Bytes", value: formatFilesize(submitedBytes) },
      { name: "Copyed Files", value: copyedFiles },
      { name: "Copyed Bytes", value: formatFilesize(copyedBytes) },
    ];
    return [fields, progress];
  }, [state, display, speed]);
  return (
    <JobCard
      job={job}
      detail={
        <Grid container spacing={2}>
          <Grid item xs={12}>
            <Box sx={{ paddingTop: "1em" }}>
              <LinearProgress variant="determinate" value={progress} />
            </Box>
          </Grid>
          {fields.map((field, idx) => (
            <Grid item xs={12} md={3} key={idx}>
              <Typography variant="body1">
                <b>{field.name}</b>: {field.value}
              </Typography>
            </Grid>
          ))}
        </Grid>
      }
      buttons={
        <Fragment>
          {state.step === JobArchiveStep.WaitForTape && <LoadTapeDialog job={job} refresh={refresh} />}
          <ViewLogDialog jobID={job.id} />
          <ArchiveViewFilesDialog sources={state.sources} />
        </Fragment>
      }
    />
  );
};
// Sidebar entry that opens a dialog for creating a new archive job from a
// source path. The path is split into a base directory plus a final segment.
const NewArchiveDialog = ({ refresh }: { refresh: () => Promise<void> }) => {
  const [open, setOpen] = useState(false);
  const [source, setSource] = useState("");
  const show = () => setOpen(true);
  const hide = () => setOpen(false);
  const handleSubmit = async () => {
    let target = source.trim();
    if (!target) {
      return;
    }
    // Strip any trailing slashes before splitting off the last segment.
    while (target.endsWith("/")) {
      target = target.slice(0, -1);
    }
    const idx = target.lastIndexOf("/");
    if (idx < 0) {
      return;
    }
    const base = target.slice(0, idx + 1);
    const leaf = target.slice(idx + 1);
    console.log(await cli.jobCreate(makeArchiveParam(1n, { sources: [{ base, path: [leaf] }] })).response);
    await refresh();
    hide();
  };
  return (
    <Fragment>
      <ListItemButton onClick={show}>
        <ListItemText primary="New Archive Job" />
      </ListItemButton>
      {open && (
        <Dialog open={true} onClose={hide} maxWidth={"sm"} fullWidth>
          <DialogTitle>New Archive Job</DialogTitle>
          <DialogContent>
            <TextField
              autoFocus
              margin="dense"
              label="Source Path"
              fullWidth
              variant="standard"
              value={source}
              onChange={(event: ChangeEvent<HTMLInputElement>) => setSource(event.target.value)}
            />
          </DialogContent>
          <DialogActions>
            <Button onClick={hide}>Cancel</Button>
            <Button onClick={handleSubmit}>Submit</Button>
          </DialogActions>
        </Dialog>
      )}
    </Fragment>
  );
};
// Button plus dialog for loading a tape into a drive: the user picks a device
// and enters the tape barcode and name, which are submitted as the job's next
// "copying" step parameters.
const LoadTapeDialog = ({ job, refresh }: { job: Job; refresh: () => Promise<void> }) => {
  const [devices, setDevices] = useState<string[]>([]);
  const [form, setForm] = useState<JobArchiveCopyingParam | null>(null);
  // Opening the dialog fetches the available drive devices first.
  const handleClickOpen = async () => {
    const reply = await cli.deviceList({}).response;
    setDevices(reply.devices);
    setForm(JobArchiveCopyingParam.create());
  };
  const handleClose = () => {
    setForm(null);
    setDevices([]);
  };
  const handleChange = (key: keyof JobArchiveCopyingParam) => (event: ChangeEvent<HTMLInputElement>) => {
    if (form === null) {
      return;
    }
    setForm({ ...form, [key]: event.target.value });
  };
  const handleSubmit = async () => {
    if (!form) {
      return;
    }
    const trimmed: JobArchiveCopyingParam = {
      device: form.device,
      barcode: form.barcode.toUpperCase(),
      name: form.name,
    };
    // Tape barcodes are expected to be exactly six characters.
    assert(trimmed.barcode.length === 6);
    const reply = await cli.jobNext(makeArchiveCopyingParam(job.id, trimmed)).response;
    console.log("job next reply= ", reply);
    await refresh();
    handleClose();
  };
  return (
    <Fragment>
      <Button size="small" onClick={handleClickOpen}>
        Load Tape
      </Button>
      {form && (
        <Dialog open={true} onClose={handleClose} maxWidth={"sm"} fullWidth>
          <DialogTitle>Load Tape</DialogTitle>
          <DialogContent>
            <DialogContentText>After load tape into tape drive, click 'Submit'</DialogContentText>
            <TextField select required margin="dense" label="Drive Device" fullWidth variant="standard" value={form.device} onChange={handleChange("device")}>
              {devices.map((device) => (
                <MenuItem key={device} value={device}>
                  {device}
                </MenuItem>
              ))}
            </TextField>
            <TextField required margin="dense" label="Tape Barcode" fullWidth variant="standard" value={form.barcode} onChange={handleChange("barcode")} />
            <TextField required margin="dense" label="Tape Name" fullWidth variant="standard" value={form.name} onChange={handleChange("name")} />
          </DialogContent>
          <DialogActions>
            <Button onClick={handleClose}>Cancel</Button>
            <Button onClick={handleSubmit}>Submit</Button>
          </DialogActions>
        </Dialog>
      )}
    </Fragment>
  );
};
// Button plus modal dialog showing a job's live-updating log console.
const ViewLogDialog = ({ jobID }: { jobID: bigint }) => {
  const [open, setOpen] = useState(false);
  const show = () => setOpen(true);
  const hide = () => setOpen(false);
  return (
    <Fragment>
      <Button size="small" onClick={show}>
        View Log
      </Button>
      {open && (
        <Dialog open={true} onClose={hide} maxWidth={"lg"} fullWidth scroll="paper" sx={{ height: "100%" }} className="view-log-dialog">
          <DialogTitle>View Log</DialogTitle>
          <DialogContent dividers>
            <LogConsole jobId={jobID} />
          </DialogContent>
          <DialogActions>
            <Button onClick={hide}>Close</Button>
          </DialogActions>
        </Dialog>
      )}
    </Fragment>
  );
};
// Streams a job's log into a <pre>, appending only newly produced bytes on
// each 2-second poll.
const LogConsole = ({ jobId }: { jobId: bigint }) => {
  const [log, setLog] = useState<string>("");
  // Byte offset already fetched. Kept in a ref because the polling loop below
  // captures a single render's closure: the original read `log.length` from
  // that stale closure, so the offset stayed 0 and the full log was
  // re-fetched (and the state fully rewritten) on every poll.
  const offset = useRef<number>(0);
  const refreshLog = useCallback(async () => {
    const reply = await cli.jobGetLog({ jobId, offset: BigInt(offset.current) }).response;
    if (reply.logs.length === 0) {
      return;
    }
    offset.current += reply.logs.length;
    const chunk = new TextDecoder().decode(reply.logs);
    // Functional update so appends never depend on a stale `log` value.
    setLog((prev) => prev + chunk);
  }, [jobId]);
  useEffect(() => {
    let closed = false;
    (async () => {
      while (!closed) {
        await refreshLog();
        await sleep(2000);
      }
    })();
    return () => {
      closed = true;
    };
  }, [refreshLog]);
  return <pre>{log || "loading..."}</pre>;
};
// Button plus dialog listing an archive job's source files with their size
// and copy status.
const ArchiveViewFilesDialog = ({ sources }: { sources: SourceState[] }) => {
  const [open, setOpen] = useState(false);
  const handleClickOpen = () => {
    setOpen(true);
  };
  const handleClose = () => {
    setOpen(false);
  };
  return (
    <Fragment>
      <Button size="small" onClick={handleClickOpen}>
        View Files
      </Button>
      {open && (
        <Dialog open={true} onClose={handleClose} maxWidth={"lg"} fullWidth scroll="paper" sx={{ height: "100%" }} className="view-log-dialog">
          <DialogTitle>View Files</DialogTitle>
          <DialogContent dividers>
            {sources.map((src, idx) => {
              if (!src.source) {
                return null;
              }
              // `key` was missing on these .map-rendered elements; the source
              // list is static for an open dialog, so the index is a safe key.
              return (
                <ListItemText
                  key={idx}
                  primary={src.source.base + src.source.path.join("/")}
                  secondary={`Size: ${formatFilesize(src.size)} Status: ${CopyStatus[src.status]}`}
                />
              );
            })}
          </DialogContent>
          <DialogActions>
            <Button onClick={handleClose}>Close</Button>
          </DialogActions>
        </Dialog>
      )}
    </Fragment>
  );
};
// Generic job card shell: status line, title, and optional detail body plus
// action buttons supplied by specialized cards.
const JobCard = ({ job, detail, buttons }: { job: Job; detail?: JSX.Element; buttons?: JSX.Element }) => {
  const title = `${job.state?.state.oneofKind?.toUpperCase()} Job ${job.id}`;
  return (
    <Card sx={{ textAlign: "left" }} className="job-detail">
      <CardContent>
        <Typography sx={{ fontSize: 14 }} color="text.secondary" gutterBottom>
          {`${JobStatus[job.status]}`}
        </Typography>
        <Typography variant="h5" component="div">{title}</Typography>
        {detail}
      </CardContent>
      <Divider />
      <CardActions>{buttons}</CardActions>
    </Card>
  );
};
// Wrap an archive job parameter into a JobCreateRequest with the given priority.
function makeArchiveParam(priority: bigint, param: JobParamArchive): JobCreateRequest {
  const wrapped = { oneofKind: "archive" as const, archive: param };
  return { job: { priority, param: { param: wrapped } } };
}
// Wrap tape-copying parameters into a JobNextRequest advancing the given job
// to its archive "copying" step.
function makeArchiveCopyingParam(jobID: bigint, param: JobArchiveCopyingParam): JobNextRequest {
  const step = { oneofKind: "copying" as const, copying: param };
  return {
    id: jobID,
    param: {
      param: {
        oneofKind: "archive" as const,
        archive: { param: step },
      },
    },
  };
}

10
frontend/src/main.tsx Normal file
View File

@@ -0,0 +1,10 @@
import React from "react";
import ReactDOM from "react-dom/client";
import App from "./app";
import "./index.css";
// Side-effect import: configures Chonky defaults and the MUI class-name prefix.
import "./init";
// Mount the application at the #root element.
ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
  <App />
);

15
frontend/src/tools.ts Normal file
View File

@@ -0,0 +1,15 @@
import { filesize } from "filesize";
// Hex-encode a string's char codes, two lowercase hex digits per code.
// The original omitted zero-padding, so codes below 0x10 produced a single
// digit (e.g. "\n" -> "a"), making the output ambiguous and undecodable.
// Codes above 0xff still emit more than two digits.
export const hexEncode = (buf: string) => {
  let str = "";
  for (let i = 0; i < buf.length; i++) {
    str += buf.charCodeAt(i).toString(16).padStart(2, "0");
  }
  return str;
};
// Human-readable size string using JEDEC binary units (1 KB = 1024 bytes).
export const formatFilesize = (size: number | bigint): string => {
  return filesize(size, { base: 2, standard: "jedec" }) as string;
};

1
frontend/src/vite-env.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/// <reference types="vite/client" />

21
frontend/tsconfig.json Normal file
View File

@@ -0,0 +1,21 @@
{
"compilerOptions": {
"target": "es2020",
"useDefineForClassFields": true,
"lib": ["DOM", "DOM.Iterable", "es2020"],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": ["src"],
"references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -0,0 +1,9 @@
{
"compilerOptions": {
"composite": true,
"module": "ESNext",
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true
},
"include": ["vite.config.ts"]
}

15
frontend/vite.config.ts Normal file
View File

@@ -0,0 +1,15 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
// https://vitejs.dev/config/
// The es2020 target (build and dependency pre-bundling) allows the BigInt
// literals (e.g. 1n) used throughout the frontend sources.
export default defineConfig({
  plugins: [react()],
  build: {
    target: "es2020",
  },
  optimizeDeps: {
    esbuildOptions: {
      target: "es2020",
    },
  },
});

52
go.mod
View File

@@ -1,12 +1,15 @@
module github.com/abc950309/tapewriter
go 1.17
go 1.18
require (
github.com/benmcclelland/mtio v0.0.0-20170506231306-f929531fb4fe
github.com/benmcclelland/sgio v0.0.0-20180629175614-f710aebf64c1
github.com/abc950309/acp v0.0.0-20221212144614-c5de5e555428
github.com/aws/aws-sdk-go v1.44.118
github.com/davecgh/go-spew v1.1.1
github.com/schollz/progressbar/v3 v3.10.1
github.com/deckarep/golang-set/v2 v2.1.0
github.com/gin-contrib/cors v1.4.0
github.com/gin-gonic/gin v1.8.1
github.com/google/uuid v1.3.0
github.com/sirupsen/logrus v1.9.0
gorm.io/driver/mysql v1.3.6
gorm.io/driver/sqlite v1.3.6
@@ -14,15 +17,46 @@ require (
)
require (
github.com/apache/thrift v0.17.0 // indirect
github.com/cenkalti/backoff/v4 v4.1.1 // indirect
github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/go-playground/validator/v10 v10.10.0 // indirect
github.com/go-sql-driver/mysql v1.6.0 // indirect
github.com/goccy/go-json v0.9.7 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/improbable-eng/grpc-web v0.15.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/klauspost/cpuid/v2 v2.0.4 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.11.7 // indirect
github.com/klauspost/cpuid/v2 v2.2.2 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/mattn/go-isatty v0.0.16 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/mattn/go-sqlite3 v1.14.12 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/rivo/uniseg v0.3.4 // indirect
golang.org/x/sys v0.0.0-20220829200755-d48e67d00261 // indirect
golang.org/x/term v0.0.0-20220722155259-a9ba230a4035 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.0.1 // indirect
github.com/rivo/uniseg v0.4.3 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/schollz/progressbar/v3 v3.12.2 // indirect
github.com/ugorji/go/codec v1.2.7 // indirect
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 // indirect
golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect
golang.org/x/sys v0.3.0 // indirect
golang.org/x/term v0.3.0 // indirect
golang.org/x/text v0.4.0 // indirect
google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506 // indirect
google.golang.org/grpc v1.51.0 // indirect
google.golang.org/protobuf v1.28.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
nhooyr.io/websocket v1.8.6 // indirect
)

Some files were not shown because too many files have changed in this diff Show More