feat: add tapes index view

Samuel N Cui
2023-10-03 16:12:44 +08:00
parent 15079aaa50
commit 999b9bf9b2
27 changed files with 1357 additions and 358 deletions


@@ -43,9 +43,7 @@ Or you can download binary from `releases`, and run the following shell commands
```shell
# If you put this to other path, you need to change scripts and systemd service file.
mkdir -p /opt/ltfs
mkdir -p /opt/yatm
tar -xvzf yatm-linux-amd64-${RELEASE_VERSION}.tar.gz -C /opt/yatm
cp /opt/yatm/config.example.yaml /opt/yatm/config.yaml

apis/library_trim.go Normal file

@@ -0,0 +1,14 @@
package apis
import (
"context"
"github.com/samuelncui/yatm/entity"
)
func (api *API) LibraryTrim(ctx context.Context, req *entity.LibraryTrimRequest) (*entity.LibraryTrimReply, error) {
if err := api.lib.Trim(ctx, req.TrimPosition, req.TrimFile); err != nil {
return nil, err
}
return &entity.LibraryTrimReply{}, nil
}


@@ -0,0 +1,17 @@
package apis
import (
"context"
"fmt"
"github.com/samuelncui/yatm/entity"
)
func (api *API) TapeGetPositions(ctx context.Context, req *entity.TapeGetPositionsRequest) (*entity.TapeGetPositionsReply, error) {
positions, err := api.lib.ListPositions(ctx, req.Id, req.Directory)
if err != nil {
return nil, fmt.Errorf("list position has error, %w", err)
}
return &entity.TapeGetPositionsReply{Positions: convertPositions(positions...)}, nil
}
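
For orientation, here is a minimal sketch of how the two endpoints added above could be exercised from Go through the generated `entity` package. The dial address, the plaintext credentials, and the assumption that the generated code follows the standard `protoc-gen-go-grpc` naming (`NewServiceClient`, getter methods) are illustrative and not part of this commit.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/samuelncui/yatm/entity"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	// Placeholder address; point it at wherever the yatm gRPC service listens.
	conn, err := grpc.Dial("127.0.0.1:5080", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	cli := entity.NewServiceClient(conn)
	ctx := context.Background()

	// List the top-level index entries recorded for tape 1.
	reply, err := cli.TapeGetPositions(ctx, &entity.TapeGetPositionsRequest{Id: 1, Directory: ""})
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range reply.GetPositions() {
		fmt.Println(p.GetPath(), p.GetSize())
	}

	// Drop positions whose tape has been deleted, then files that no longer have any position.
	if _, err := cli.LibraryTrim(ctx, &entity.LibraryTrimRequest{TrimPosition: true, TrimFile: true}); err != nil {
		log.Fatal(err)
	}
}
```

On the web side, the matching calls are the `tapeGetPositions` and `libraryTrim` methods on the generated `ServiceClient` (see `service.client.ts` below).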


@@ -8,6 +8,7 @@ export TARGET_FILE="yatm-linux-amd64-${RELEASE_VERSION}.tar.gz"
rm -rf output;
mkdir -p output;
mkdir -p output/captured_indices;
cp -r scripts ./output/;
cp ./cmd/httpd/yatm-httpd.service ./output/
@@ -16,7 +17,7 @@ cp ./LICENSE ./output/
cp ./README.md ./output/
echo "${RELEASE_VERSION}" > ./output/VERSION
# docker run --rm -v $(pwd):/app golang:1.21 sh -c "cd /app && bash "
# docker run --rm -v $(pwd):/app golang:1.21 sh -c "cd /app && bash build_backend.sh"
# docker run --rm -v $(pwd):/app node:20-slim sh -c "cd /app && bash build_frontend.sh"
./build_backend.sh
./build_frontend.sh

File diff suppressed because it is too large.


@@ -18,6 +18,7 @@ service Service {
rpc TapeList(TapeListRequest) returns (TapeListReply) {}
rpc TapeDelete(TapeDeleteRequest) returns (TapeDeleteReply) {}
rpc TapeGetPositions(TapeGetPositionsRequest) returns (TapeGetPositionsReply) {}
rpc JobList(JobListRequest) returns (JobListReply) {}
rpc JobCreate(JobCreateRequest) returns (JobCreateReply) {}
@@ -31,6 +32,7 @@ service Service {
rpc DeviceList(DeviceListRequest) returns (DeviceListReply) {}
rpc LibraryExport(LibraryExportRequest) returns (LibraryExportReply) {}
rpc LibraryTrim(LibraryTrimRequest) returns (LibraryTrimReply) {}
}
message FileGetRequest {
@@ -98,6 +100,15 @@ message TapeDeleteRequest {
message TapeDeleteReply {
}
message TapeGetPositionsRequest {
int64 id = 1;
string directory = 2;
}
message TapeGetPositionsReply {
repeated position.Position positions = 1;
}
message JobListRequest {
oneof param {
JobMGetRequest mget = 1;
@@ -177,3 +188,11 @@ message LibraryExportRequest {
message LibraryExportReply {
bytes json = 1;
}
message LibraryTrimRequest {
bool trim_position = 1;
bool trim_file = 2;
}
message LibraryTrimReply {
}


@@ -29,6 +29,7 @@ type ServiceClient interface {
FileListParents(ctx context.Context, in *FileListParentsRequest, opts ...grpc.CallOption) (*FileListParentsReply, error)
TapeList(ctx context.Context, in *TapeListRequest, opts ...grpc.CallOption) (*TapeListReply, error)
TapeDelete(ctx context.Context, in *TapeDeleteRequest, opts ...grpc.CallOption) (*TapeDeleteReply, error)
TapeGetPositions(ctx context.Context, in *TapeGetPositionsRequest, opts ...grpc.CallOption) (*TapeGetPositionsReply, error)
JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error)
JobCreate(ctx context.Context, in *JobCreateRequest, opts ...grpc.CallOption) (*JobCreateReply, error)
JobDelete(ctx context.Context, in *JobDeleteRequest, opts ...grpc.CallOption) (*JobDeleteReply, error)
@@ -38,6 +39,7 @@ type ServiceClient interface {
SourceList(ctx context.Context, in *SourceListRequest, opts ...grpc.CallOption) (*SourceListReply, error)
DeviceList(ctx context.Context, in *DeviceListRequest, opts ...grpc.CallOption) (*DeviceListReply, error)
LibraryExport(ctx context.Context, in *LibraryExportRequest, opts ...grpc.CallOption) (*LibraryExportReply, error)
LibraryTrim(ctx context.Context, in *LibraryTrimRequest, opts ...grpc.CallOption) (*LibraryTrimReply, error)
}
type serviceClient struct {
@@ -111,6 +113,15 @@ func (c *serviceClient) TapeDelete(ctx context.Context, in *TapeDeleteRequest, o
return out, nil
}
func (c *serviceClient) TapeGetPositions(ctx context.Context, in *TapeGetPositionsRequest, opts ...grpc.CallOption) (*TapeGetPositionsReply, error) {
out := new(TapeGetPositionsReply)
err := c.cc.Invoke(ctx, "/service.Service/TapeGetPositions", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *serviceClient) JobList(ctx context.Context, in *JobListRequest, opts ...grpc.CallOption) (*JobListReply, error) {
out := new(JobListReply)
err := c.cc.Invoke(ctx, "/service.Service/JobList", in, out, opts...)
@@ -192,6 +203,15 @@ func (c *serviceClient) LibraryExport(ctx context.Context, in *LibraryExportRequ
return out, nil
}
func (c *serviceClient) LibraryTrim(ctx context.Context, in *LibraryTrimRequest, opts ...grpc.CallOption) (*LibraryTrimReply, error) {
out := new(LibraryTrimReply)
err := c.cc.Invoke(ctx, "/service.Service/LibraryTrim", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ServiceServer is the server API for Service service.
// All implementations must embed UnimplementedServiceServer
// for forward compatibility
@@ -203,6 +223,7 @@ type ServiceServer interface {
FileListParents(context.Context, *FileListParentsRequest) (*FileListParentsReply, error)
TapeList(context.Context, *TapeListRequest) (*TapeListReply, error)
TapeDelete(context.Context, *TapeDeleteRequest) (*TapeDeleteReply, error)
TapeGetPositions(context.Context, *TapeGetPositionsRequest) (*TapeGetPositionsReply, error)
JobList(context.Context, *JobListRequest) (*JobListReply, error)
JobCreate(context.Context, *JobCreateRequest) (*JobCreateReply, error)
JobDelete(context.Context, *JobDeleteRequest) (*JobDeleteReply, error)
@@ -212,6 +233,7 @@ type ServiceServer interface {
SourceList(context.Context, *SourceListRequest) (*SourceListReply, error)
DeviceList(context.Context, *DeviceListRequest) (*DeviceListReply, error)
LibraryExport(context.Context, *LibraryExportRequest) (*LibraryExportReply, error)
LibraryTrim(context.Context, *LibraryTrimRequest) (*LibraryTrimReply, error)
mustEmbedUnimplementedServiceServer()
}
@@ -240,6 +262,9 @@ func (UnimplementedServiceServer) TapeList(context.Context, *TapeListRequest) (*
func (UnimplementedServiceServer) TapeDelete(context.Context, *TapeDeleteRequest) (*TapeDeleteReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method TapeDelete not implemented")
}
func (UnimplementedServiceServer) TapeGetPositions(context.Context, *TapeGetPositionsRequest) (*TapeGetPositionsReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method TapeGetPositions not implemented")
}
func (UnimplementedServiceServer) JobList(context.Context, *JobListRequest) (*JobListReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method JobList not implemented")
}
@@ -267,6 +292,9 @@ func (UnimplementedServiceServer) DeviceList(context.Context, *DeviceListRequest
func (UnimplementedServiceServer) LibraryExport(context.Context, *LibraryExportRequest) (*LibraryExportReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method LibraryExport not implemented")
}
func (UnimplementedServiceServer) LibraryTrim(context.Context, *LibraryTrimRequest) (*LibraryTrimReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method LibraryTrim not implemented")
}
func (UnimplementedServiceServer) mustEmbedUnimplementedServiceServer() {}
// UnsafeServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -406,6 +434,24 @@ func _Service_TapeDelete_Handler(srv interface{}, ctx context.Context, dec func(
return interceptor(ctx, in, info, handler)
}
func _Service_TapeGetPositions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(TapeGetPositionsRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).TapeGetPositions(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/TapeGetPositions",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).TapeGetPositions(ctx, req.(*TapeGetPositionsRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Service_JobList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(JobListRequest)
if err := dec(in); err != nil {
@@ -568,6 +614,24 @@ func _Service_LibraryExport_Handler(srv interface{}, ctx context.Context, dec fu
return interceptor(ctx, in, info, handler)
}
func _Service_LibraryTrim_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(LibraryTrimRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).LibraryTrim(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/service.Service/LibraryTrim",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).LibraryTrim(ctx, req.(*LibraryTrimRequest))
}
return interceptor(ctx, in, info, handler)
}
// Service_ServiceDesc is the grpc.ServiceDesc for Service service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
@@ -603,6 +667,10 @@ var Service_ServiceDesc = grpc.ServiceDesc{
MethodName: "TapeDelete",
Handler: _Service_TapeDelete_Handler,
},
{
MethodName: "TapeGetPositions",
Handler: _Service_TapeGetPositions_Handler,
},
{
MethodName: "JobList",
Handler: _Service_JobList_Handler,
@@ -639,6 +707,10 @@ var Service_ServiceDesc = grpc.ServiceDesc{
MethodName: "LibraryExport",
Handler: _Service_LibraryExport_Handler,
},
{
MethodName: "LibraryTrim",
Handler: _Service_LibraryTrim_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "service.proto",


@@ -1,5 +1,5 @@
{
"name": "tape-manager",
"name": "yatm",
"private": true,
"version": "0.0.0",
"type": "module",


@@ -1,6 +1,6 @@
import { FileData } from "@aperturerobotics/chonky";
import { GrpcWebFetchTransport } from "@protobuf-ts/grpcweb-transport";
import { ServiceClient, File, SourceFile } from "./entity";
import { ServiceClient, File, SourceFile, Tape, Position } from "./entity";
import moment from "moment";
@@ -28,11 +28,6 @@ export const Root: FileData = {
droppable: true,
};
export const sleep = (ms: number): Promise<null> =>
new Promise((resolve) => {
setTimeout(resolve, ms);
});
const transport = new GrpcWebFetchTransport({
baseUrl: apiBase,
format: "binary",
@@ -81,6 +76,56 @@ export function convertSourceFiles(files: Array<SourceFile>): FileData[] {
});
}
export function convertTapes(tapes: Array<Tape>): FileData[] {
return tapes.map((tape) => {
// const isDir = (file.mode & ModeDir) > 0;
return {
id: `${tape.id}`,
name: tape.barcode,
ext: "",
isDir: true,
isHidden: false,
openable: true,
selectable: true,
draggable: false,
droppable: false,
size: 0,
modDate: moment.unix(Number(tape.createTime)).toDate(),
isTape: true,
};
});
}
export function convertPositions(positions: Array<Position>): FileData[] {
return positions.map((posi) => {
const isDir = (posi.mode & ModeDir) > 0;
const name = isDir ? splitPath(posi.path.slice(0, -1)) : splitPath(posi.path);
return {
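// The id packs the tape id and on-tape path together ("<tapeId>:<path>"); the tapes browser splits it on ":" when a folder is opened.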
id: `${posi.tapeId}:${posi.path}`,
name: name,
ext: extname(name),
isDir: isDir,
isHidden: false,
openable: isDir,
selectable: false,
draggable: false,
droppable: false,
size: Number(posi.size),
modDate: moment.unix(Number(posi.writeTime)).toDate(),
};
});
}
function splitPath(filename: string): string {
const idx = filename.lastIndexOf("/");
if (idx < 0) {
return filename;
}
return filename.slice(idx + 1);
}
function extname(filename: string): string {
const idx = filename.lastIndexOf(".");
if (idx < 0) {


@@ -12,7 +12,7 @@ import { TapesBrowser, TapesType } from "./pages/tapes";
import { JobsBrowser, JobsType } from "./pages/jobs";
import "./app.less";
import { sleep } from "./api";
import { sleep } from "./tools";
import { Nullable } from "tsdef";
import { Job } from "./entity";
import { useEffect } from "react";
@@ -46,7 +46,7 @@ const App = () => {
(_: ChangeEvent<{}>, newValue: string) => {
navigate("/" + newValue);
},
[navigate]
[navigate],
);
return (


@@ -4,6 +4,8 @@
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
import { Service } from "./service";
import type { LibraryTrimReply } from "./service";
import type { LibraryTrimRequest } from "./service";
import type { LibraryExportReply } from "./service";
import type { LibraryExportRequest } from "./service";
import type { DeviceListReply } from "./service";
@@ -22,6 +24,8 @@ import type { JobCreateReply } from "./service";
import type { JobCreateRequest } from "./service";
import type { JobListReply } from "./service";
import type { JobListRequest } from "./service";
import type { TapeGetPositionsReply } from "./service";
import type { TapeGetPositionsRequest } from "./service";
import type { TapeDeleteReply } from "./service";
import type { TapeDeleteRequest } from "./service";
import type { TapeListReply } from "./service";
@@ -71,6 +75,10 @@ export interface IServiceClient {
* @generated from protobuf rpc: TapeDelete(service.TapeDeleteRequest) returns (service.TapeDeleteReply);
*/
tapeDelete(input: TapeDeleteRequest, options?: RpcOptions): UnaryCall<TapeDeleteRequest, TapeDeleteReply>;
/**
* @generated from protobuf rpc: TapeGetPositions(service.TapeGetPositionsRequest) returns (service.TapeGetPositionsReply);
*/
tapeGetPositions(input: TapeGetPositionsRequest, options?: RpcOptions): UnaryCall<TapeGetPositionsRequest, TapeGetPositionsReply>;
/**
* @generated from protobuf rpc: JobList(service.JobListRequest) returns (service.JobListReply);
*/
@@ -107,6 +115,10 @@ export interface IServiceClient {
* @generated from protobuf rpc: LibraryExport(service.LibraryExportRequest) returns (service.LibraryExportReply);
*/
libraryExport(input: LibraryExportRequest, options?: RpcOptions): UnaryCall<LibraryExportRequest, LibraryExportReply>;
/**
* @generated from protobuf rpc: LibraryTrim(service.LibraryTrimRequest) returns (service.LibraryTrimReply);
*/
libraryTrim(input: LibraryTrimRequest, options?: RpcOptions): UnaryCall<LibraryTrimRequest, LibraryTrimReply>;
}
/**
* @generated from protobuf service service.Service
@@ -166,67 +178,81 @@ export class ServiceClient implements IServiceClient, ServiceInfo {
const method = this.methods[6], opt = this._transport.mergeOptions(options);
return stackIntercept<TapeDeleteRequest, TapeDeleteReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: TapeGetPositions(service.TapeGetPositionsRequest) returns (service.TapeGetPositionsReply);
*/
tapeGetPositions(input: TapeGetPositionsRequest, options?: RpcOptions): UnaryCall<TapeGetPositionsRequest, TapeGetPositionsReply> {
const method = this.methods[7], opt = this._transport.mergeOptions(options);
return stackIntercept<TapeGetPositionsRequest, TapeGetPositionsReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobList(service.JobListRequest) returns (service.JobListReply);
*/
jobList(input: JobListRequest, options?: RpcOptions): UnaryCall<JobListRequest, JobListReply> {
const method = this.methods[7], opt = this._transport.mergeOptions(options);
const method = this.methods[8], opt = this._transport.mergeOptions(options);
return stackIntercept<JobListRequest, JobListReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobCreate(service.JobCreateRequest) returns (service.JobCreateReply);
*/
jobCreate(input: JobCreateRequest, options?: RpcOptions): UnaryCall<JobCreateRequest, JobCreateReply> {
const method = this.methods[8], opt = this._transport.mergeOptions(options);
const method = this.methods[9], opt = this._transport.mergeOptions(options);
return stackIntercept<JobCreateRequest, JobCreateReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobDelete(service.JobDeleteRequest) returns (service.JobDeleteReply);
*/
jobDelete(input: JobDeleteRequest, options?: RpcOptions): UnaryCall<JobDeleteRequest, JobDeleteReply> {
const method = this.methods[9], opt = this._transport.mergeOptions(options);
const method = this.methods[10], opt = this._transport.mergeOptions(options);
return stackIntercept<JobDeleteRequest, JobDeleteReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobNext(service.JobNextRequest) returns (service.JobNextReply);
*/
jobNext(input: JobNextRequest, options?: RpcOptions): UnaryCall<JobNextRequest, JobNextReply> {
const method = this.methods[10], opt = this._transport.mergeOptions(options);
const method = this.methods[11], opt = this._transport.mergeOptions(options);
return stackIntercept<JobNextRequest, JobNextReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobDisplay(service.JobDisplayRequest) returns (service.JobDisplayReply);
*/
jobDisplay(input: JobDisplayRequest, options?: RpcOptions): UnaryCall<JobDisplayRequest, JobDisplayReply> {
const method = this.methods[11], opt = this._transport.mergeOptions(options);
const method = this.methods[12], opt = this._transport.mergeOptions(options);
return stackIntercept<JobDisplayRequest, JobDisplayReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: JobGetLog(service.JobGetLogRequest) returns (service.JobGetLogReply);
*/
jobGetLog(input: JobGetLogRequest, options?: RpcOptions): UnaryCall<JobGetLogRequest, JobGetLogReply> {
const method = this.methods[12], opt = this._transport.mergeOptions(options);
const method = this.methods[13], opt = this._transport.mergeOptions(options);
return stackIntercept<JobGetLogRequest, JobGetLogReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: SourceList(service.SourceListRequest) returns (service.SourceListReply);
*/
sourceList(input: SourceListRequest, options?: RpcOptions): UnaryCall<SourceListRequest, SourceListReply> {
const method = this.methods[13], opt = this._transport.mergeOptions(options);
const method = this.methods[14], opt = this._transport.mergeOptions(options);
return stackIntercept<SourceListRequest, SourceListReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: DeviceList(service.DeviceListRequest) returns (service.DeviceListReply);
*/
deviceList(input: DeviceListRequest, options?: RpcOptions): UnaryCall<DeviceListRequest, DeviceListReply> {
const method = this.methods[14], opt = this._transport.mergeOptions(options);
const method = this.methods[15], opt = this._transport.mergeOptions(options);
return stackIntercept<DeviceListRequest, DeviceListReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: LibraryExport(service.LibraryExportRequest) returns (service.LibraryExportReply);
*/
libraryExport(input: LibraryExportRequest, options?: RpcOptions): UnaryCall<LibraryExportRequest, LibraryExportReply> {
const method = this.methods[15], opt = this._transport.mergeOptions(options);
const method = this.methods[16], opt = this._transport.mergeOptions(options);
return stackIntercept<LibraryExportRequest, LibraryExportReply>("unary", this._transport, method, opt, input);
}
/**
* @generated from protobuf rpc: LibraryTrim(service.LibraryTrimRequest) returns (service.LibraryTrimReply);
*/
libraryTrim(input: LibraryTrimRequest, options?: RpcOptions): UnaryCall<LibraryTrimRequest, LibraryTrimReply> {
const method = this.methods[17], opt = this._transport.mergeOptions(options);
return stackIntercept<LibraryTrimRequest, LibraryTrimReply>("unary", this._transport, method, opt, input);
}
}


@@ -181,6 +181,28 @@ export interface TapeDeleteRequest {
*/
export interface TapeDeleteReply {
}
/**
* @generated from protobuf message service.TapeGetPositionsRequest
*/
export interface TapeGetPositionsRequest {
/**
* @generated from protobuf field: int64 id = 1;
*/
id: bigint;
/**
* @generated from protobuf field: string directory = 2;
*/
directory: string;
}
/**
* @generated from protobuf message service.TapeGetPositionsReply
*/
export interface TapeGetPositionsReply {
/**
* @generated from protobuf field: repeated position.Position positions = 1;
*/
positions: Position[];
}
/**
* @generated from protobuf message service.JobListRequest
*/
@@ -374,6 +396,24 @@ export interface LibraryExportReply {
*/
json: Uint8Array;
}
/**
* @generated from protobuf message service.LibraryTrimRequest
*/
export interface LibraryTrimRequest {
/**
* @generated from protobuf field: bool trim_position = 1;
*/
trimPosition: boolean;
/**
* @generated from protobuf field: bool trim_file = 2;
*/
trimFile: boolean;
}
/**
* @generated from protobuf message service.LibraryTrimReply
*/
export interface LibraryTrimReply {
}
// @generated message type with reflection information, may provide speed optimized methods
class FileGetRequest$Type extends MessageType<FileGetRequest> {
constructor() {
@@ -1103,6 +1143,107 @@ class TapeDeleteReply$Type extends MessageType<TapeDeleteReply> {
*/
export const TapeDeleteReply = new TapeDeleteReply$Type();
// @generated message type with reflection information, may provide speed optimized methods
class TapeGetPositionsRequest$Type extends MessageType<TapeGetPositionsRequest> {
constructor() {
super("service.TapeGetPositionsRequest", [
{ no: 1, name: "id", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
{ no: 2, name: "directory", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value?: PartialMessage<TapeGetPositionsRequest>): TapeGetPositionsRequest {
const message = { id: 0n, directory: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<TapeGetPositionsRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TapeGetPositionsRequest): TapeGetPositionsRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 id */ 1:
message.id = reader.int64().toBigInt();
break;
case /* string directory */ 2:
message.directory = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: TapeGetPositionsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 id = 1; */
if (message.id !== 0n)
writer.tag(1, WireType.Varint).int64(message.id);
/* string directory = 2; */
if (message.directory !== "")
writer.tag(2, WireType.LengthDelimited).string(message.directory);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message service.TapeGetPositionsRequest
*/
export const TapeGetPositionsRequest = new TapeGetPositionsRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class TapeGetPositionsReply$Type extends MessageType<TapeGetPositionsReply> {
constructor() {
super("service.TapeGetPositionsReply", [
{ no: 1, name: "positions", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Position }
]);
}
create(value?: PartialMessage<TapeGetPositionsReply>): TapeGetPositionsReply {
const message = { positions: [] };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<TapeGetPositionsReply>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TapeGetPositionsReply): TapeGetPositionsReply {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* repeated position.Position positions */ 1:
message.positions.push(Position.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: TapeGetPositionsReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* repeated position.Position positions = 1; */
for (let i = 0; i < message.positions.length; i++)
Position.internalBinaryWrite(message.positions[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message service.TapeGetPositionsReply
*/
export const TapeGetPositionsReply = new TapeGetPositionsReply$Type();
// @generated message type with reflection information, may provide speed optimized methods
class JobListRequest$Type extends MessageType<JobListRequest> {
constructor() {
super("service.JobListRequest", [
@@ -2018,6 +2159,86 @@ class LibraryExportReply$Type extends MessageType<LibraryExportReply> {
* @generated MessageType for protobuf message service.LibraryExportReply
*/
export const LibraryExportReply = new LibraryExportReply$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LibraryTrimRequest$Type extends MessageType<LibraryTrimRequest> {
constructor() {
super("service.LibraryTrimRequest", [
{ no: 1, name: "trim_position", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "trim_file", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
create(value?: PartialMessage<LibraryTrimRequest>): LibraryTrimRequest {
const message = { trimPosition: false, trimFile: false };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<LibraryTrimRequest>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LibraryTrimRequest): LibraryTrimRequest {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool trim_position */ 1:
message.trimPosition = reader.bool();
break;
case /* bool trim_file */ 2:
message.trimFile = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: LibraryTrimRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool trim_position = 1; */
if (message.trimPosition !== false)
writer.tag(1, WireType.Varint).bool(message.trimPosition);
/* bool trim_file = 2; */
if (message.trimFile !== false)
writer.tag(2, WireType.Varint).bool(message.trimFile);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message service.LibraryTrimRequest
*/
export const LibraryTrimRequest = new LibraryTrimRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LibraryTrimReply$Type extends MessageType<LibraryTrimReply> {
constructor() {
super("service.LibraryTrimReply", []);
}
create(value?: PartialMessage<LibraryTrimReply>): LibraryTrimReply {
const message = {};
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<LibraryTrimReply>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LibraryTrimReply): LibraryTrimReply {
return target ?? this.create();
}
internalBinaryWrite(message: LibraryTrimReply, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message service.LibraryTrimReply
*/
export const LibraryTrimReply = new LibraryTrimReply$Type();
/**
* @generated ServiceType for protobuf service service.Service
*/
@@ -2029,6 +2250,7 @@ export const Service = new ServiceType("service.Service", [
{ name: "FileListParents", options: {}, I: FileListParentsRequest, O: FileListParentsReply },
{ name: "TapeList", options: {}, I: TapeListRequest, O: TapeListReply },
{ name: "TapeDelete", options: {}, I: TapeDeleteRequest, O: TapeDeleteReply },
{ name: "TapeGetPositions", options: {}, I: TapeGetPositionsRequest, O: TapeGetPositionsReply },
{ name: "JobList", options: {}, I: JobListRequest, O: JobListReply },
{ name: "JobCreate", options: {}, I: JobCreateRequest, O: JobCreateReply },
{ name: "JobDelete", options: {}, I: JobDeleteRequest, O: JobDeleteReply },
@@ -2037,5 +2259,6 @@ export const Service = new ServiceType("service.Service", [
{ name: "JobGetLog", options: {}, I: JobGetLogRequest, O: JobGetLogReply },
{ name: "SourceList", options: {}, I: SourceListRequest, O: SourceListReply },
{ name: "DeviceList", options: {}, I: DeviceListRequest, O: DeviceListReply },
{ name: "LibraryExport", options: {}, I: LibraryExportRequest, O: LibraryExportReply }
{ name: "LibraryExport", options: {}, I: LibraryExportRequest, O: LibraryExportReply },
{ name: "LibraryTrim", options: {}, I: LibraryTrimRequest, O: LibraryTrimReply }
]);


@@ -34,7 +34,8 @@ import { TreeView, TreeItem } from "@mui/x-tree-view";
import ExpandMoreIcon from "@mui/icons-material/ExpandMore";
import ChevronRightIcon from "@mui/icons-material/ChevronRight";
import { cli, sleep, fileBase } from "../api";
import { cli, fileBase } from "../api";
import { sleep } from "../tools";
import { Job, JobDisplay, JobListRequest, JobNextRequest, JobStatus, CopyStatus, LibraryEntityType, JobDeleteRequest } from "../entity";
import { JobArchiveCopyingParam, JobArchiveStep, JobArchiveDisplay, JobArchiveState } from "../entity";
@@ -54,8 +55,20 @@ export const JobsBrowser = () => {
const [jobs, setJobs] = useState<DisplayableJob[] | null>(null);
const refresh = useCallback(async () => {
const jobReplys = await cli.jobList(JobListRequest.create({ param: { oneofKind: "list", list: {} } })).response;
const displayReplys = await Promise.all(jobReplys.jobs.map((job) => cli.jobDisplay({ id: job.id }).response));
const targets = jobReplys.jobs.map((job, idx) => ({ ...job, ...displayReplys[idx].display }));
const displays = new Map<BigInt, JobDisplay>();
for (const reply of await Promise.all(
jobReplys.jobs
.filter((job) => job.status === JobStatus.PROCESSING)
.map((job) => cli.jobDisplay({ id: job.id }).response.then((reply) => ({ ...reply, jobID: job.id }))),
)) {
if (!reply.display) {
continue;
}
displays.set(reply.jobID, reply.display);
}
const targets = jobReplys.jobs.map((job) => ({ ...job, ...displays.get(job.id) }));
console.log("refresh jobs list, ", targets);
setJobs(targets);
}, [setJobs]);


@@ -6,48 +6,78 @@ import Box from "@mui/material/Box";
import { FileBrowser, FileNavbar, FileToolbar, FileList, FileContextMenu, FileArray, FileBrowserHandle } from "@aperturerobotics/chonky";
import { ChonkyActions, ChonkyFileActionData, FileData } from "@aperturerobotics/chonky";
import { cli, Root } from "../api";
import { TapeListRequest, Source, Tape } from "../entity";
import { cli, Root, convertTapes, convertPositions } from "../api";
import { TapeListRequest, Source, Tape, Position } from "../entity";
export const TapesType = "tapes";
const convertTapes = (tapes: Array<Tape>): FileData[] => {
return tapes.map((tape) => {
// const isDir = (file.mode & ModeDir) > 0;
return {
id: `${tape.id}`,
name: tape.barcode,
ext: "",
isDir: true,
isHidden: false,
openable: false,
selectable: true,
draggable: true,
droppable: false,
size: 0,
modDate: moment.unix(Number(tape.createTime)).toDate(),
};
});
};
const useTapesSourceBrowser = (source: RefObject<FileBrowserHandle>) => {
const [files, setFiles] = useState<FileArray>(Array(1).fill(null));
const [folderChain, setFolderChan] = useState<FileArray>([Root]);
const current = useMemo(() => {
if (folderChain.length === 0) {
return Root;
}
const openFolder = useCallback(async (id: string) => {
const reply = await cli.tapeList({ param: { oneofKind: "list", list: { offset: 0n, limit: 1000n } } }).response;
const last = folderChain.slice(-1)[0];
if (!last) {
return Root;
}
setFiles(convertTapes(reply.tapes));
setFolderChan([Root]);
}, []);
return last;
}, [folderChain]);
const openFolder = useCallback(
async (target: FileData) => {
if (target.id === Root.id) {
const reply = await cli.tapeList({ param: { oneofKind: "list", list: { offset: 0n, limit: 1000n } } }).response;
setFiles(convertTapes(reply.tapes));
setFolderChan([Root]);
return;
}
const id = target.id;
var tapeIDStr = id;
var dir = "";
const splitIdx = tapeIDStr.indexOf(":");
if (splitIdx >= 0) {
dir = tapeIDStr.slice(splitIdx + 1);
tapeIDStr = tapeIDStr.slice(0, splitIdx);
}
const reply = await cli.tapeGetPositions({ id: BigInt(tapeIDStr), directory: dir }).response;
const files = convertPositions(reply.positions);
console.log("refresh jobs list, target= ", target, "tape_id= ", tapeIDStr, "dir= ", dir, "reply= ", reply, "files= ", files);
setFiles(files);
const targetFolderChain = [];
for (const folder of folderChain) {
if (!folder) {
continue;
}
if (folder.id === target.id) {
targetFolderChain.push(folder);
setFolderChan(targetFolderChain);
return;
}
targetFolderChain.push(folder);
}
targetFolderChain.push(target);
setFolderChan(targetFolderChain);
return;
},
[folderChain],
);
useEffect(() => {
openFolder(Root.id);
openFolder(Root);
}, []);
const onFileAction = useCallback(
(data: ChonkyFileActionData) => {
console.log("source", data);
switch (data.id) {
case ChonkyActions.OpenFiles.id:
(async () => {
@@ -59,7 +89,7 @@ const useTapesSourceBrowser = (source: RefObject<FileBrowserHandle>) => {
}
if (fileToOpen.isDir) {
await openFolder(fileToOpen.id);
await openFolder(fileToOpen);
return;
}
})();
@@ -67,7 +97,12 @@ const useTapesSourceBrowser = (source: RefObject<FileBrowserHandle>) => {
return;
case ChonkyActions.DeleteFiles.id:
(async () => {
await cli.tapeDelete({ ids: data.state.selectedFiles.map((file) => BigInt(file.id)) });
const targetTapes = data.state.selectedFiles;
if (!confirm(`Following tapes will be deleted, may cause data loss. Are you sure?\n${targetTapes.map((tape) => tape.name).join(", ")}`)) {
return;
}
await cli.tapeDelete({ ids: targetTapes.filter((file) => file.isTape).map((file) => BigInt(file.id)) });
await openFolder(current);
})();
return;
}


@@ -22,3 +22,8 @@ export const download = (buf: Uint8Array, filename: string, contentType: string)
link.download = filename;
link.click();
};
export const sleep = (ms: number): Promise<null> =>
new Promise((resolve) => {
setTimeout(resolve, ms);
});

go.mod

@@ -18,7 +18,7 @@ require (
github.com/modern-go/reflect2 v1.0.2
github.com/rifflock/lfshook v0.0.0-20180920164130-b9218ef580f5
github.com/samber/lo v1.38.1
github.com/samuelncui/acp v0.0.0-20230927193628-457f88d5268d
github.com/samuelncui/acp v0.0.0-20230929123032-b9f8584ad50c
github.com/sirupsen/logrus v1.9.3
google.golang.org/grpc v1.53.0
google.golang.org/protobuf v1.30.0

go.sum

@@ -362,6 +362,10 @@ github.com/samuelncui/acp v0.0.0-20230927173814-44b54705fc4b h1:wfi5H9nbag1rnUM5
github.com/samuelncui/acp v0.0.0-20230927173814-44b54705fc4b/go.mod h1:HDBJGNFN6yd3kWuCU5eKaCICvmCwVWb6AzFS+wSKyWQ=
github.com/samuelncui/acp v0.0.0-20230927193628-457f88d5268d h1:/xwkO9zlY8TMcG+asORTXWEqKY9tD4wEx4kb3q/7TNY=
github.com/samuelncui/acp v0.0.0-20230927193628-457f88d5268d/go.mod h1:HDBJGNFN6yd3kWuCU5eKaCICvmCwVWb6AzFS+wSKyWQ=
github.com/samuelncui/acp v0.0.0-20230928143329-dd07ebc94c58 h1:Mgc3xitaiqsbL6hNEUzic5JCESmEQ3Ll+KdJEwMniGs=
github.com/samuelncui/acp v0.0.0-20230928143329-dd07ebc94c58/go.mod h1:HDBJGNFN6yd3kWuCU5eKaCICvmCwVWb6AzFS+wSKyWQ=
github.com/samuelncui/acp v0.0.0-20230929123032-b9f8584ad50c h1:xJVq1UOaqjI3JVGUQvT+w6584UdEBGzxy7WN8XXuSnk=
github.com/samuelncui/acp v0.0.0-20230929123032-b9f8584ad50c/go.mod h1:HDBJGNFN6yd3kWuCU5eKaCICvmCwVWb6AzFS+wSKyWQ=
github.com/samuelncui/godf v0.0.0-20230927093204-37ea5acb9fc1 h1:K2m4b66nzupWlkfUPJKIw2tgz4aDociv5XwtlynwbzI=
github.com/samuelncui/godf v0.0.0-20230927093204-37ea5acb9fc1/go.mod h1:lGc26yUHA5Fr2Cm/FzlkwCQJ9VtBUK9cue56biDDnWo=
github.com/schollz/progressbar/v3 v3.13.1 h1:o8rySDYiQ59Mwzy2FELeHY5ZARXZTVJC7iHD6PEFUiE=


@@ -138,13 +138,13 @@ main() {
exit 1
fi
mkdir -p /opt/ltfs
mkdir -p /opt/yatm
tar -xvzf ${GZIP_FILE} -C /opt/yatm
if [[ ! -f '/opt/yatm/config.yaml' ]]; then
cp /opt/yatm/config.example.yaml /opt/yatm/config.yaml
echo "Copy example config to /opt/yatm/config.yaml, you may edit it later"
cp /opt/yatm/config.example.yaml /opt/yatm/config.yaml;
vim /opt/yatm/config.yaml;
echo "Copy example config to /opt/yatm/config.yaml, you may edit it later";
fi
systemctl daemon-reload


@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
mapset "github.com/deckarep/golang-set/v2"
"github.com/modern-go/reflect2"
"github.com/samber/lo"
"github.com/samuelncui/yatm/entity"
@@ -98,6 +99,90 @@ func (l *Library) Import(ctx context.Context, buf []byte) error {
return nil
}
func (l *Library) Trim(ctx context.Context, position, file bool) error {
if !position {
return nil
}
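// Pass 1: walk positions in id-ordered batches and delete those whose tape no longer exists.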
var current int64
for {
positions := make([]*Position, 0, batchSize)
if r := l.db.WithContext(ctx).Where("id > ?", current).Order("id ASC").Limit(batchSize).Find(&positions); r.Error != nil {
return fmt.Errorf("scan position fail, err= %w", r.Error)
}
if len(positions) == 0 {
break
}
current = positions[len(positions)-1].ID
tapeIDs := mapset.NewThreadUnsafeSetWithSize[int64](1)
for _, posi := range positions {
tapeIDs.Add(posi.TapeID)
}
tapes, err := l.MGetTape(ctx, tapeIDs.ToSlice()...)
if err != nil {
return fmt.Errorf("mget tape fail, %w", err)
}
needDelete := make([]int64, 0)
for _, posi := range positions {
if tape, has := tapes[posi.TapeID]; has && tape != nil {
continue
}
needDelete = append(needDelete, posi.ID)
}
if len(needDelete) == 0 {
continue
}
if err := l.DeletePositions(ctx, needDelete...); err != nil {
return fmt.Errorf("delete position fail, %w", err)
}
}
if !file {
return nil
}
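// Pass 2: walk files in id-ordered batches and delete those that no longer have any position on tape.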
current = 0
for {
files := make([]*File, 0, batchSize)
if r := l.db.WithContext(ctx).Where("id > ?", current).Order("id ASC").Limit(batchSize).Find(&files); r.Error != nil {
return fmt.Errorf("scan file fail, err= %w", r.Error)
}
if len(files) == 0 {
break
}
current = files[len(files)-1].ID
fileIDs := lo.Map(files, func(f *File, _ int) int64 { return f.ID })
positions, err := l.MGetPositionByFileID(ctx, fileIDs...)
if err != nil {
return fmt.Errorf("mget position by file id fail, %w", err)
}
needDelete := make([]int64, 0)
for _, file := range files {
if posis, has := positions[file.ID]; has && len(posis) > 0 {
continue
}
needDelete = append(needDelete, file.ID)
}
if len(needDelete) == 0 {
continue
}
if r := l.db.WithContext(ctx).Where("id IN (?)", needDelete).Delete(ModelFile); r.Error != nil {
return fmt.Errorf("delete files fail, err= %w", r.Error)
}
}
return nil
}
func listAll[T any](ctx context.Context, l *Library, items []T) ([]T, error) {
v := new(T)
id := reflect2.TypeOfPtr(*v).Elem().(reflect2.StructType).FieldByName("ID")


@@ -3,7 +3,13 @@ package library
import (
"context"
"fmt"
"io/fs"
"sort"
"strings"
"time"
"github.com/samuelncui/yatm/resource"
"github.com/samuelncui/yatm/tools"
)
var (
@@ -23,6 +29,10 @@ type Position struct {
Hash []byte `gorm:"type:varbinary(32)" json:"hash,omitempty"` // sha256
}
func (l *Library) SavePosition(ctx context.Context, posi *Position) error {
return l.db.WithContext(ctx).Save(posi).Error
}
func (l *Library) GetPositionByFileID(ctx context.Context, fileID int64) ([]*Position, error) {
results, err := l.MGetPositionByFileID(ctx, fileID)
if err != nil {
@@ -48,3 +58,63 @@ func (l *Library) MGetPositionByFileID(ctx context.Context, fileIDs ...int64) (m
return results, nil
}
func (l *Library) ListPositions(ctx context.Context, tapeID int64, prefix string) ([]*Position, error) {
positions := make([]*Position, 0, 128)
if r := l.db.WithContext(ctx).Where("tape_id = ? AND path LIKE ?", tapeID, resource.SQLEscape(prefix)+"%").Order("path ASC").Find(&positions); r.Error != nil {
return nil, fmt.Errorf("find position by file id fail, %w", r.Error)
}
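// Sort with '/' remapped to NUL so that everything under a directory sorts as one contiguous run before sibling names that would otherwise interleave with it.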
convertPath := tools.Cache(func(p string) string { return strings.ReplaceAll(p, "/", "\x00") })
sort.Slice(positions, func(i int, j int) bool {
return convertPath(positions[i].Path) < convertPath(positions[j].Path)
})
filtered := make([]*Position, 0, 128)
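// Keep direct children of the prefix as-is; collapse deeper paths into one synthetic directory entry per immediate child, aggregating size and the latest mod/write times.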
for _, posi := range positions {
if !strings.HasPrefix(posi.Path, prefix) {
continue
}
suffix := posi.Path[len(prefix):]
idx := strings.IndexRune(suffix, '/')
if idx < 0 {
filtered = append(filtered, posi)
continue
}
path := prefix + suffix[:idx+1]
if len(filtered) > 0 && filtered[len(filtered)-1].Path == path {
target := filtered[len(filtered)-1]
target.Size += posi.Size
if target.ModTime.Before(posi.ModTime) {
target.ModTime = posi.ModTime
}
if target.WriteTime.Before(posi.WriteTime) {
target.WriteTime = posi.WriteTime
}
continue
}
filtered = append(filtered, &Position{
TapeID: posi.TapeID,
Path: path,
Mode: uint32(fs.ModeDir | fs.ModePerm),
ModTime: posi.ModTime,
WriteTime: posi.WriteTime,
Size: posi.Size,
})
}
return filtered, nil
}
func (l *Library) DeletePositions(ctx context.Context, ids ...int64) error {
if r := l.db.WithContext(ctx).Where("id IN (?)", ids).Delete(ModelPosition); r.Error != nil {
return fmt.Errorf("delete positions fail, err= %w", r.Error)
}
return nil
}


@@ -17,3 +17,44 @@ func NewDBConn(dialect, dsn string) (*gorm.DB, error) {
return gorm.Open(dialector)
}
func SQLEscape(sql string) string {
dest := make([]byte, 0, 2*len(sql))
var escape byte
for i := 0; i < len(sql); i++ {
c := sql[i]
escape = 0
switch c {
case 0: /* Must be escaped for 'mysql' */
escape = '0'
break
case '\n': /* Must be escaped for logs */
escape = 'n'
break
case '\r':
escape = 'r'
break
case '\\':
escape = '\\'
break
case '\'':
escape = '\''
break
case '"': /* Better safe than sorry */
escape = '"'
break
case '\032': /* decimal 26, octal 032, hex 0x1a; this gives problems on Win32 */
escape = 'Z'
}
if escape != 0 {
dest = append(dest, '\\', escape)
} else {
dest = append(dest, c)
}
}
return string(dest)
}


@@ -1,6 +1,7 @@
#!/usr/bin/env bash
set -ex;
mt -f ${DEVICE} load
sleep 5
for i in {1..60}; do

scripts/get_device Executable file

@@ -0,0 +1,24 @@
#!/usr/bin/env bash
set -e;
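# Resolve ${DEVICE} (/dev/stN or /dev/nstN) to its SCSI generic node (/dev/sgX) via sg_map,
# which is what mkltfs/ltfs are invoked against in the format and mount scripts.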
DEVICE=`readlink -f ${DEVICE}`
REGEXP='/dev/n?st([0-9]+)[alm]?'
if [[ ! $DEVICE =~ $REGEXP ]]; then
echo "'$DEVICE' doesn't match" >&2
exit 1
fi
NUM="${BASH_REMATCH[1]}"
TRIES=("/dev/nst${NUM}" "/dev/st${NUM}")
for TRY in ${TRIES[@]}; do
SG_DEVICE=`sg_map | grep ${TRY} || echo ''`;
if [[ $SG_DEVICE != "" ]]; then
echo $SG_DEVICE | awk '{print $1}'
exit 0;
fi
done
echo "'$DEVICE' not found" >&2
exit 1


@@ -1,6 +1,8 @@
#!/usr/bin/env bash
set -ex;
SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'`
CURDIR=$(cd $(dirname $0); pwd);
SG_DEVICE=`${CURDIR}/get_device`
mkltfs -f -d ${SG_DEVICE} -s ${TAPE_BARCODE} -n ${TAPE_NAME}
sleep 3


@@ -1,6 +1,16 @@
#!/usr/bin/env bash
set -ex;
SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'`
ltfs -o devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/ltfs -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject -s ${MOUNT_POINT}
CURDIR=$(cd $(dirname $0); pwd);
SG_DEVICE=`${CURDIR}/get_device`
ltfs -o devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/yatm/captured_indices -o capture_index -o min_pool_size=256 -o max_pool_size=1024 -o eject -s ${MOUNT_POINT}
sleep 3
MOUNT_POINT_TARGET=`df ${MOUNT_POINT} --output=target | sed -n '1!p'`
if [[ $MOUNT_POINT != $MOUNT_POINT_TARGET ]]; then
echo "mount '$MOUNT_POINT' fail, current target is '$MOUNT_POINT_TARGET'" >&2
exit 1
fi
echo "mount '$MOUNT_POINT' success" >&2

scripts/mount.openltfs Executable file

@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -ex;
CURDIR=$(cd $(dirname $0); pwd);
SG_DEVICE=`${CURDIR}/get_device`
ltfs -o devname=${SG_DEVICE} -o noatime -o sync_type=unmount -o work_directory=/opt/yatm/captured_indices -o capture_index=/opt/yatm/captured_indices -o min_pool_size=256 -o max_pool_size=1024 -o eject -s ${MOUNT_POINT}
sleep 3
MOUNT_POINT_TARGET=`df ${MOUNT_POINT} --output=target | sed -n '1!p'`
if [[ $MOUNT_POINT != $MOUNT_POINT_TARGET ]]; then
echo "mount '$MOUNT_POINT' fail, current target is '$MOUNT_POINT_TARGET'" >&2
exit 1
fi
echo "mount '$MOUNT_POINT' success" >&2


@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -ex;
# SG_DEVICE=`sg_map | grep ${DEVICE} | awk '{print $1}'`
BARCODE=`./yatm-lto-info -f /dev/nst0 | grep 'Barcode' | awk '{print $3}'`
mt -f ${DEVICE} load
BARCODE=`./yatm-lto-info -f ${DEVICE} | grep 'Barcode' | awk '{print $3}'`
echo "{\"barcode\": \"$BARCODE\"}" > $OUT
sleep 3