parent 732f728ff8
commit 7c11ba18fd
@@ -0,0 +1,2 @@
node_modules
/autorestic
@@ -0,0 +1,14 @@
# Rename backend to repository

# Env variables

AUTORESTIC_BB_B2_ACCOUNT_ID=123 -> AUTORESTIC_BACKENDS_BB_ENV_B2__ACCOUNT__ID=123

- All fields can be configured by env now
- To escape `_`, replace it with a double underscore `__`

# Rest property on backend config

No rest property anymore; it can be replaced with string extrapolation

# Every string is now replaceable with env variables
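For illustration only (not part of the commit): a minimal sketch of the string extrapolation the last two notes refer to. Any `$VAR` or `${VAR}` inside a config string is replaced with the value of that environment variable, mirroring the pattern used by the `NonEmptyString` schema further down in this commit.

```ts
// Illustration only — assumes the same $VAR / ${VAR} regex as the NonEmptyString transform below.
const extrapolate = (s: string): string =>
  s.replaceAll(/\$(\w+)|\${(\w+)}/g, (_, g0, g1) => process.env[g0 || g1] ?? '')

process.env.B2_ACCOUNT_ID = '123'
console.log(extrapolate('b2:bucket-${B2_ACCOUNT_ID}')) // -> "b2:bucket-123"
```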
@@ -0,0 +1,23 @@
{
  "private": true,
  "name": "autorestic",
  "module": "src/index.ts",
  "type": "module",
  "scripts": {
    "schema:gen": "bun run ./scripts/generateSchema.ts",
    "bin": "bun build ./src/index.ts --compile --outfile autorestic"
  },
  "devDependencies": {
    "bun-types": "^0.6.0",
    "typescript": "^5.0.0",
    "zod-to-json-schema": "^3.21.2"
  },
  "dependencies": {
    "@commander-js/extra-typings": "^11.0.0",
    "commander": "^11.0.0",
    "pino": "^8.14.1",
    "pino-pretty": "^10.0.0",
    "yaml": "^2.3.1",
    "zod": "^3.21.4"
  }
}
@@ -0,0 +1,252 @@
{
  "$ref": "#/definitions/mySchema",
  "definitions": {
    "mySchema": {
      "type": "object",
      "properties": {
        "version": {
          "type": "number",
          "description": "version number"
        },
        "repos": {
          "type": "object",
          "additionalProperties": {
            "type": "object",
            "properties": {
              "type": {
                "type": "string",
                "enum": [
                  "local",
                  "sftp",
                  "rest",
                  "swift",
                  "s3",
                  "b2",
                  "azure",
                  "gs",
                  "rclone"
                ],
                "description": "type of repository"
              },
              "path": {
                "type": "string",
                "minLength": 1,
                "description": "restic path"
              },
              "key": {
                "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                "description": "encryption key for the repository"
              },
              "env": {
                "type": "object",
                "additionalProperties": {
                  "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                  "description": "value of the environment variable"
                },
                "description": "environment variables"
              },
              "options": {
                "type": "object",
                "properties": {
                  "all": {
                    "type": "object",
                    "additionalProperties": {
                      "anyOf": [
                        {
                          "type": "boolean",
                          "const": true,
                          "description": "boolean flag"
                        },
                        {
                          "anyOf": [
                            {
                              "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                              "description": "non-empty string that can extrapolate env variables inside it"
                            },
                            {
                              "type": "array",
                              "items": {
                                "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/all/additionalProperties/anyOf/1/anyOf/0"
                              },
                              "minItems": 1
                            }
                          ]
                        }
                      ],
                      "description": "value of option"
                    }
                  },
                  "backup": {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/all"
                  },
                  "forget": {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/all"
                  }
                },
                "additionalProperties": false,
                "description": "options"
              }
            },
            "required": [
              "type",
              "path",
              "key"
            ],
            "additionalProperties": false
          },
          "description": "available repositories"
        },
        "locations": {
          "type": "object",
          "additionalProperties": {
            "type": "object",
            "properties": {
              "from": {
                "anyOf": [
                  {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                    "description": "local path to backup"
                  },
                  {
                    "type": "array",
                    "items": {
                      "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/from/anyOf/0"
                    },
                    "minItems": 1
                  }
                ]
              },
              "to": {
                "anyOf": [
                  {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                    "description": "repository to backup to"
                  },
                  {
                    "type": "array",
                    "items": {
                      "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/to/anyOf/0"
                    },
                    "minItems": 1
                  }
                ]
              },
              "copy": {
                "type": "object",
                "additionalProperties": {
                  "anyOf": [
                    {
                      "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                      "description": "destination repository"
                    },
                    {
                      "type": "array",
                      "items": {
                        "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/copy/additionalProperties/anyOf/0"
                      },
                      "minItems": 1
                    }
                  ]
                }
              },
              "cron": {
                "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                "description": "execute backups for the given cron job"
              },
              "hooks": {
                "type": "object",
                "properties": {
                  "before": {
                    "anyOf": [
                      {
                        "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/path",
                        "description": "command to be executed"
                      },
                      {
                        "type": "array",
                        "items": {
                          "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/hooks/properties/before/anyOf/0"
                        },
                        "minItems": 1
                      }
                    ],
                    "description": "list of commands"
                  },
                  "after": {
                    "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/hooks/properties/before",
                    "description": "list of commands"
                  },
                  "failure": {
                    "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/hooks/properties/before",
                    "description": "list of commands"
                  },
                  "success": {
                    "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/hooks/properties/before",
                    "description": "list of commands"
                  }
                },
                "additionalProperties": false,
                "description": "hooks to be executed"
              },
              "options": {
                "type": "object",
                "properties": {
                  "all": {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/all"
                  },
                  "backup": {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/backup"
                  },
                  "forget": {
                    "$ref": "#/definitions/mySchema/properties/repos/additionalProperties/properties/options/properties/forget"
                  }
                },
                "additionalProperties": false,
                "description": "native restic options"
              },
              "forget": {
                "anyOf": [
                  {
                    "type": "boolean",
                    "description": "automatically run \"forget\" when backing up"
                  },
                  {
                    "type": "string",
                    "const": "prune",
                    "description": "also prune when forgetting"
                  }
                ]
              }
            },
            "required": [
              "from",
              "to"
            ],
            "additionalProperties": false,
            "description": "Location"
          },
          "description": "available locations"
        },
        "global": {
          "type": "object",
          "properties": {
            "options": {
              "$ref": "#/definitions/mySchema/properties/locations/additionalProperties/properties/options",
              "description": "native restic options"
            }
          },
          "additionalProperties": false,
          "description": "global configuration"
        },
        "extras": {}
      },
      "required": [
        "version",
        "repos",
        "locations"
      ],
      "additionalProperties": false
    }
  },
  "$schema": "http://json-schema.org/draft-07/schema#"
}
@@ -0,0 +1,17 @@
import { mkdir, rm, writeFile } from 'node:fs/promises'
import { zodToJsonSchema } from 'zod-to-json-schema'
import { ConfigSchema } from '../src/config/schema/config'

const OUTPUT = './schema'

await rm(OUTPUT, { recursive: true, force: true })
await mkdir(OUTPUT, { recursive: true })

const Schemas = {
  config: ConfigSchema,
}

for (const [name, schema] of Object.entries(Schemas)) {
  const jsonSchema = zodToJsonSchema(schema, 'mySchema')
  await writeFile(`${OUTPUT}/${name}.json`, JSON.stringify(jsonSchema, null, 2), { encoding: 'utf-8' })
}
@@ -0,0 +1,14 @@
import { Log } from '../logger'
import { Context } from '../models/context'

export async function backup(ctx: Context) {
  const log = Log.child({ cmd: 'backup' })
  log.trace('starting')

  // Locations
  for (const location of ctx.locations) {
    await location.backup()
  }

  log.trace('done')
}
@@ -0,0 +1,25 @@
import { unlockRepo, waitForRepo } from '../lock'
import { Log } from '../logger'
import { Context } from '../models/context'
import { isResticAvailable } from '../restic'

export async function check(ctx: Context) {
  const l = Log.child({ cmd: 'check' })
  l.trace('starting')

  // Restic
  await isResticAvailable()

  // Repos
  for (const repo of ctx.repos) {
    await waitForRepo(ctx, repo.name)
    try {
      await repo.init()
      await repo.check()
    } finally {
      unlockRepo(ctx, repo.name)
    }
  }

  l.trace('done')
}
@@ -0,0 +1,21 @@
import { describe, expect, test } from 'bun:test'
import { InvalidEnvFileLine } from '../../errors'
import { parseFile } from './file'

describe('env file', () => {
  test('simple', () => {
    expect(parseFile(`test_foo=ok`)).toEqual({ test_foo: 'ok' })
  })

  test('multiple values', () => {
    expect(parseFile(`test_foo=ok\n \n spacing = foo \n`)).toEqual({ test_foo: 'ok', spacing: 'foo' })
  })

  test('invalid: key', () => {
    expect(() => parseFile(`a=123\na f=ok`)).toThrow(new InvalidEnvFileLine('a f=ok'))
  })

  test('invalid: missing =', () => {
    expect(() => parseFile(`a=123\na ok`)).toThrow(new InvalidEnvFileLine('a ok'))
  })
})
@@ -0,0 +1,55 @@
import { exists, readFile } from 'node:fs/promises'
import { InvalidEnvFileLine } from '../../errors'
import { setByPath } from '../../utils/path'
import { relativePath } from '../resolution'

export function parseFile(contents: string) {
  const variables: Record<string, string> = {}
  const lines = contents
    .trim()
    .split('\n')
    .map((l) => l.trim())
  const matcher = /^\s*(?<variable>\w+)\s*=(?<value>.*)$/
  for (const line of lines) {
    if (!line) continue
    const match = matcher.exec(line)
    if (!match) throw new InvalidEnvFileLine(line)
    variables[match.groups!.variable] = match.groups!.value.trim()
  }
  return variables
}

const PREFIX = 'AUTORESTIC_'

function envVariableToObjectPath(env: string): string {
  if (env.startsWith(PREFIX)) env = env.replace(PREFIX, '')
  return (
    env
      // Convert to object path
      .replaceAll('_', '.')
      // Escape the double underscore. __ -> .. -> _
      .replaceAll('..', '_')
      .toLowerCase()
  )
}

/**
 * Fill the config file with the env file variables.
 * These take precedence over the config file itself.
 */
export async function enrichConfig(rawConfig: any, path: string) {
  const envFilePath = relativePath(path, '.autorestic.env')
  let variables: Record<string, string> = {}

  if (await exists(envFilePath)) {
    const envFile = parseFile(await readFile(envFilePath, 'utf-8'))
    Object.assign(variables, envFile)
  }

  Object.assign(variables, process.env)

  for (const [key, value] of Object.entries(variables)) {
    if (!key.startsWith(PREFIX)) continue
    setByPath(rawConfig, envVariableToObjectPath(key), value)
  }
}
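A usage sketch, for illustration only (not part of the commit): how an `AUTORESTIC_*` variable is turned into an object path by `envVariableToObjectPath` and then written into the raw config with `setByPath`. The `repos` segment follows the config schema in this commit; the equivalent variable in the notes above uses `BACKENDS`.

```ts
// Illustration only — mirrors envVariableToObjectPath above:
// the prefix is stripped, "_" becomes a path separator, and "__" escapes a literal "_".
const toObjectPath = (env: string): string =>
  env
    .replace(/^AUTORESTIC_/, '')
    .replaceAll('_', '.') // REPOS.BB.ENV.B2..ACCOUNT..ID
    .replaceAll('..', '_') // REPOS.BB.ENV.B2_ACCOUNT_ID
    .toLowerCase()

console.log(toObjectPath('AUTORESTIC_REPOS_BB_ENV_B2__ACCOUNT__ID'))
// -> "repos.bb.env.b2_account_id", which setByPath then writes into the raw config
```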
@@ -0,0 +1,29 @@
import { exists, readFile } from 'node:fs/promises'
import yaml from 'yaml'
import { ConfigFileNotFound, CustomError, InvalidConfigFile } from '../errors'
import { enrichConfig } from './env/file'
import { autoLocateConfig } from './resolution'
import { Config, ConfigWithMetaSchema } from './schema/config'
import { basename } from 'node:path'

export async function loadConfig(customPath?: string): Promise<Config> {
  let path: string
  if (customPath) {
    path = customPath
    if (!(await exists(path))) throw new ConfigFileNotFound([path])
  } else {
    path = await autoLocateConfig()
  }

  const rawConfig = await readFile(path, 'utf-8')
  const config = yaml.parse(rawConfig)
  await enrichConfig(config, path)
  config.meta = { path: basename(path) }
  const parsed = ConfigWithMetaSchema.safeParse(config)
  if (!parsed.success)
    throw new InvalidConfigFile(parsed.error.errors.map((e) => `${e.path.join(' > ')}: ${e.message}`))

  // Check for semantics

  return parsed.data
}
@@ -0,0 +1,23 @@
import { exists } from 'node:fs/promises'
import { dirname, isAbsolute, join, resolve } from 'node:path'
import { ConfigFileNotFound } from '../errors'

const DEFAULT_DIRS = ['./', '~/', '~/.config/autorestic']
const FILENAMES = ['.autorestic.yaml', '.autorestic.yml', '.autorestic.json']

export async function autoLocateConfig(): Promise<string> {
  const paths = DEFAULT_DIRS
  const xdgHome = process.env['XDG_CONFIG_HOME']
  if (xdgHome) paths.push(xdgHome)
  for (const path of paths) {
    for (const filename of FILENAMES) {
      const file = join(path, filename)
      if (await exists(file)) return file
    }
  }
  throw new ConfigFileNotFound(paths)
}

export function relativePath(base: string, path: string): string {
  return isAbsolute(path) ? path : resolve(base, path)
}
@@ -0,0 +1,20 @@
import { ZodTypeAny, z } from 'zod'
import { Log } from '../../logger'

export const NonEmptyString = z
  .string()
  .min(1)
  // Extrapolate env variables from a string
  .transform((s) => {
    return s.replaceAll(/\$(\w+)|\${(\w+)}/g, (_, g0, g1) => {
      const variable = g0 || g1
      const value = process.env[variable] ?? ''
      if (!value) Log.error(`cannot find environment variable "${variable}" to replace in ${s}`)
      return value
    })
  })
  .describe('non-empty string that can extrapolate env variables inside it')

export function OptionallyArray<T extends ZodTypeAny>(type: T) {
  return z.union([type, z.array(type).min(1)])
}
@@ -0,0 +1,69 @@
import { z } from 'zod'
import { asArray } from '../../utils/array'
import { RepositorySchema } from './repository'
import { NonEmptyString } from './common'
import { LocationSchema } from './location'
import { OptionsSchema } from './options'

export const ConfigSchema = z.strictObject({
  version: z.number().describe('version number'),
  repos: z.record(NonEmptyString.describe('repository name'), RepositorySchema).describe('available repositories'),
  locations: z.record(NonEmptyString.describe('location name'), LocationSchema).describe('available locations'),
  global: z
    .strictObject({
      options: OptionsSchema.optional(),
    })
    .describe('global configuration')
    .optional(),
  extras: z.any().optional(),
})

const ConfigMeta = z
  .strictObject({
    path: NonEmptyString.describe('The path of the loaded config'),
  })
  .describe('Meta information about the config')

export const ConfigWithMetaSchema = ConfigSchema.extend({
  meta: ConfigMeta,
}).superRefine((config, ctx) => {
  const availableRepos = Object.keys(config.repos)
  for (const [name, location] of Object.entries(config.locations)) {
    const locationPath = [...ctx.path, 'locations', name]
    const toRepos = asArray(location.to)
    // Check if all target repos are valid
    for (const to of toRepos) {
      if (!availableRepos.includes(to)) {
        const message = `location "${name}" has an invalid repository "${to}"`
        ctx.addIssue({ message, code: 'custom', path: [...locationPath, 'to'] })
      }
    }
    // Check copy field
    if (!location.copy) continue
    for (const [source, destinations] of Object.entries(location.copy)) {
      const path = [...locationPath, 'copy', source]
      if (!toRepos.includes(source))
        ctx.addIssue({
          code: 'custom',
          path,
          message: `copy source "${source}" must also be a backup target`,
        })
      for (const destination of asArray(destinations)) {
        if (destination === source)
          ctx.addIssue({
            code: 'custom',
            path: [...path, destination],
            message: `destination repository "${destination}" cannot also be the source in the copy field`,
          })
        if (!availableRepos.includes(destination))
          ctx.addIssue({
            code: 'custom',
            path: [...path, destination],
            message: `destination repository "${destination}" does not exist`,
          })
      }
    }
  }
})

export type Config = z.infer<typeof ConfigWithMetaSchema>
@@ -0,0 +1,14 @@
import { z } from 'zod'
import { NonEmptyString, OptionallyArray } from './common'

const Command = NonEmptyString.describe('command to be executed')
const Commands = OptionallyArray(Command).describe('list of commands')

export const HooksSchema = z
  .strictObject({
    before: Commands.optional(),
    after: Commands.optional(),
    failure: Commands.optional(),
    success: Commands.optional(),
  })
  .describe('hooks to be executed')
@@ -0,0 +1,28 @@
import { z } from 'zod'
import { NonEmptyString, OptionallyArray } from './common'
import { HooksSchema } from './hooks'
import { OptionsSchema } from './options'

export const LocationSchema = z
  .strictObject({
    from: OptionallyArray(NonEmptyString.describe('local path to backup')),
    to: OptionallyArray(NonEmptyString.describe('repository to backup to')),
    copy: z
      .record(
        NonEmptyString.describe('source repository to copy from'),
        OptionallyArray(NonEmptyString.describe('destination repository'))
      )
      .optional(),

    // adapter:
    cron: NonEmptyString.describe('execute backups for the given cron job').optional(),
    hooks: HooksSchema.optional(),
    options: OptionsSchema.optional(),
    forget: z
      .union([
        z.boolean().describe('automatically run "forget" when backing up'),
        z.literal('prune').describe('also prune when forgetting'),
      ])
      .optional(),
  })
  .describe('Location')
@@ -0,0 +1,15 @@
import { z } from 'zod'
import { NonEmptyString, OptionallyArray } from './common'

const OptionSchema = z.record(
  NonEmptyString.describe('native restic option'),
  z.union([z.literal(true).describe('boolean flag'), OptionallyArray(NonEmptyString)]).describe('value of option')
)

export const OptionsSchema = z
  .strictObject({
    all: OptionSchema.optional(),
    backup: OptionSchema.optional(),
    forget: OptionSchema.optional(),
  })
  .describe('native restic options')
@@ -0,0 +1,18 @@
import { z } from 'zod'
import { NonEmptyString } from './common'
import { OptionsSchema } from './options'

export const RepositorySchema = z.strictObject({
  type: z.enum(['local', 'sftp', 'rest', 'swift', 's3', 'b2', 'azure', 'gs', 'rclone']).describe('type of repository'),
  path: NonEmptyString.describe('restic path'),
  key: NonEmptyString.describe('encryption key for the repository'),
  env: z
    .record(
      NonEmptyString.describe('environment variable'),
      NonEmptyString.describe('value of the environment variable')
    )
    .transform((env) => Object.fromEntries(Object.entries(env).map(([key, value]) => [key.toUpperCase(), value])))
    .describe('environment variables')
    .optional(),
  options: OptionsSchema.describe('options').optional(),
})
@@ -0,0 +1,47 @@
function formatLines(lines: string[]) {
  return lines.map((p) => ` ▶ ${p}`).join('\n')
}

export class CustomError extends Error {}

export class InvalidEnvFileLine extends CustomError {
  constructor(line: string) {
    super(`invalid env file line: "${line}"`)
  }
}

export class NotImplemented extends CustomError {
  constructor(functionality: string) {
    super(`not implemented: ${functionality}`)
  }
}

export class ConfigFileNotFound extends CustomError {
  constructor(paths: string[]) {
    super(`could not locate config file.\nthe following paths were tried:\n${formatLines(paths)}`)
  }
}

export class InvalidConfigFile extends CustomError {
  constructor(errors: string[]) {
    super(`could not parse the config file.\n${formatLines(errors)}`)
  }
}

export class BinaryNotAvailable extends CustomError {
  constructor(binary: string) {
    super(`binary "${binary}" is not available in $PATH`)
  }
}

export class ResticError extends CustomError {
  constructor(errors: string[]) {
    super(`internal restic error.\n${formatLines(errors)}`)
  }
}

export class LockfileAlreadyLocked extends CustomError {
  constructor(repo: string) {
    super(`cannot acquire lock for repository "${repo}", already in use`)
  }
}
@@ -0,0 +1,138 @@
import { Command, Help, Option, program } from '@commander-js/extra-typings'
import { loadConfig } from './config'
import { CustomError, NotImplemented } from './errors'
import { Log, LogLevel, setLevelFromFlag } from './logger'
import { check } from './cmd/check'
import { Context } from './models/context'
import { backup } from './cmd/backup'

export const helpConfig: Partial<Help> = {
  showGlobalOptions: true,
  sortOptions: true,
  sortSubcommands: true,
  helpWidth: 1,
}

program
  .name('autorestic')
  .description('configuration manager and runner for restic')
  .version('2.0.0-alpha.0')
  .configureHelp(helpConfig)
  .allowExcessArguments(false)
  .allowUnknownOption(false)

// Global options
program.option('-c, --config <file>', 'specify custom configuration file')
program.option('-v', 'verbosity', (_, previous) => previous + 1, 1)
program.addOption(new Option('--ci', 'CI mode').env('CI').default(false))

// Common Options
const specificLocation = new Option('-l, --location <locations...>', 'location name, multiple possible')
specificLocation.variadic = true
const allLocations = new Option('-a, --all', 'all locations')
const specificRepo = new Option('-r, --repository <names...>', 'repository name, multiple possible')
specificRepo.variadic = true
const allRepos = new Option('-a, --all', 'all repositories')

function mergeOptions<T extends {}>(local: T, p: Command) {
  const globals = p.optsWithGlobals() as { config?: string; verbosity: number; ci: boolean }
  return {
    ...globals,
    ...local,
  }
}

program.hook('preAction', (command) => {
  // @ts-ignore
  const v: number = command.opts().v
  setLevelFromFlag(v)
})

program
  .command('check')
  .description('check if the config is valid and set up the repositories')
  .configureHelp(helpConfig)
  .action(async (options, p) => {
    const merged = mergeOptions(options, p)
    const config = await loadConfig(merged.config)
    const ctx = new Context(config)
    await check(ctx)
  })

program
  .command('backup')
  .description('create backups')
  .configureHelp(helpConfig)
  .addOption(specificLocation)
  .addOption(allLocations)
  .action(async (options, p) => {
    // throw new NotImplemented('backup')
    const merged = mergeOptions(options, p)
    const config = await loadConfig(merged.config)
    const ctx = new Context(config)
    await backup(ctx)
  })

program
  .command('exec')
  .description('execute arbitrary native restic commands for given repositories')
  .configureHelp(helpConfig)
  .addOption(specificRepo)
  .addOption(allRepos)
  .allowExcessArguments(true)
  .action((options, p) => {
    throw new NotImplemented('exec')
  })

program
  .command('forget')
  .description('forget snapshots according to the specified policies')
  .configureHelp(helpConfig)
  .addOption(specificLocation)
  .addOption(allLocations)
  // Pass natively
  // .option('--dry-run', 'do not write changes, show what would be affected')
  // .option('--prune', 'also prune repository')
  .action((options) => {
    throw new NotImplemented('forget')
  })

program
  .command('restore')
  .description('restore a snapshot to a given location')
  .option('--force', 'overwrite target folder')
  .option('--from <repository>', 'repository from which to restore')
  .option('--to <path>', 'path where to restore the data')
  .option('-l, --location <location>', 'location to be restored')
  .argument('[snapshot-id]', 'snapshot to be restored. if empty latest will be taken')
  .action(() => {
    throw new NotImplemented('restore')
  })

const self = new Command('self').description('utility commands for managing autorestic').configureHelp(helpConfig)
self.command('install').action(() => {
  throw new NotImplemented('install')
})
self.command('uninstall').action(() => {
  throw new NotImplemented('uninstall')
})
self.command('upgrade').action(() => {
  throw new NotImplemented('upgrade')
})
self.command('completion').action(() => {
  throw new NotImplemented('completion')
})
program.addCommand(self)

try {
  await program.parseAsync()
} catch (e) {
  if (e instanceof CustomError) {
    Log.fatal(e.message)
  } else if (e instanceof Error) {
    Log.fatal(`unknown error: ${e.message}`)
  }
  process.exit(1)
} finally {
  // TODO: Unlock
}
@@ -0,0 +1,20 @@
import { describe, expect, mock, test, beforeEach } from 'bun:test'
import { lockRepo } from '.'
import { Context } from '../models/context'
import { mkdir, rm } from 'node:fs/promises'

const mockPath = './test/'
const mockContext: Context = { config: { meta: { path: mockPath } } } as any

describe('lock', () => {
  beforeEach(async () => {
    // Cleanup lock file
    await rm(mockPath, { recursive: true, force: true })
    await mkdir(mockPath, { recursive: true })
  })

  test('lock', () => {
    lockRepo(mockContext, 'foo')
    // lockRepo(mockContext, 'foo')
  })
})
@@ -0,0 +1,78 @@
import { readFileSync, writeFileSync } from 'node:fs'
import yaml from 'yaml'
import { relativePath } from '../config/resolution'
import { LockfileAlreadyLocked } from '../errors'
import { Log } from '../logger'
import { Context } from '../models/context'
import { Lockfile, LockfileSchema } from './schema'
import { wait } from '../utils/time'

const LOCKFILE = '.autorestic.lock'
const VERSION = 2
const l = Log.child({ command: 'lock' })

function load(ctx: Context): Lockfile {
  const defaultLockfile = { version: VERSION, cron: {}, running: {} }
  try {
    const path = relativePath(ctx.config.meta.path, LOCKFILE)
    l.trace('looking for lock file', { path })
    // throw new Error(path)
    const rawConfig = readFileSync(path, 'utf-8')
    const config = yaml.parse(rawConfig)
    const parsed = LockfileSchema.safeParse(config)
    if (!parsed.success) return defaultLockfile
    if (parsed.data.version < VERSION) {
      l.debug('lockfile is old and will be overwritten')
      return defaultLockfile
    }
    return parsed.data
  } catch {
    return defaultLockfile
  }
}

function write(ctx: Context, lockfile: Lockfile) {
  const path = relativePath(ctx.config.meta.path, LOCKFILE)
  writeFileSync(path, yaml.stringify(lockfile), 'utf-8')
}

export function lockRepo(ctx: Context, repo: string) {
  const lock = load(ctx)
  l.trace('trying to lock repository', { repo })
  if (lock.running[repo]) throw new LockfileAlreadyLocked(repo)
  lock.running[repo] = true
  write(ctx, lock)
}

/**
 * Waits for a repo to become unlocked, and errors if it does not succeed in the given timeout.
 *
 * @param [timeout=10] max seconds to wait for repo to become unlocked
 */
export async function waitForRepo(ctx: Context, repo: string, timeout = 10) {
  const now = Date.now()
  while (Date.now() - now < timeout * 1_000) {
    try {
      lockRepo(ctx, repo)
      l.trace('repo is free again', { repo })
      return
    } catch {
      l.trace('waiting for repo to be unlocked', { repo })
      await wait(0.1) // Wait for 100ms
    }
  }
  throw new LockfileAlreadyLocked(repo)
}

export function updateLastRun(ctx: Context, location: string) {
  const lock = load(ctx)
  lock.cron[location] = Date.now()
  write(ctx, lock)
}

export function unlockRepo(ctx: Context, repo: string) {
  l.trace('unlocking repository', { repo })
  const lock = load(ctx)
  lock.running[repo] = false
  write(ctx, lock)
}
@@ -0,0 +1,13 @@
import { z } from 'zod'

export const LockfileSchema = z.strictObject({
  version: z.number().min(0).describe('lockfile version'),
  running: z
    .record(z.string().describe('repository'), z.boolean().describe('whether repository is running'))
    .describe('running information for each repository'),
  cron: z
    .record(z.string().describe('location'), z.number().describe('timestamp of last backup'))
    .describe('information about last run for a given location. in milliseconds'),
})

export type Lockfile = z.infer<typeof LockfileSchema>
@@ -0,0 +1,37 @@
import Pino, { type LoggerOptions } from 'pino'
import Pretty from 'pino-pretty'

// https://getpino.io/#/docs/api?id=loggerlevel-string-gettersetter
export enum LogLevel {
  Trace = 'trace',
  Debug = 'debug',
  Info = 'info',
  Warn = 'warn',
  Error = 'error',
  Fatal = 'fatal',
  Silent = 'silent',
}

const pretty = !process.env.CI
const options: LoggerOptions = {
  base: undefined,
  level: LogLevel.Info,
}

export const Log = pretty ? Pino(options, Pretty({ colorize: true })) : Pino(options)

export function setLevelFromFlag(flag: number) {
  switch (flag) {
    case 1:
      Log.level = LogLevel.Info
      break
    case 2:
      Log.level = LogLevel.Debug
      break
    case 3:
      Log.level = LogLevel.Trace
      break
    default:
      Log.error('invalid logging level')
  }
}
@@ -0,0 +1,25 @@
import { Config } from '../config/schema/config'
import { CustomError } from '../errors'
import { Location } from './location'
import { Repository } from './repository'

export class Context {
  repos: Repository[]
  locations: Location[]

  constructor(public config: Config) {
    this.repos = Object.entries(config.repos).map(([name, r]) => new Repository(this, name, r))
    this.locations = Object.entries(config.locations).map(([name, l]) => new Location(this, name, l))
  }

  getRepo(name: string) {
    const repo = this.repos.find((r) => r.name === name)
    if (!repo) throw new CustomError(`could not find repository "${name}"`)
    return repo
  }
  getLocation(name: string) {
    const location = this.locations.find((l) => l.name === name)
    if (!location) throw new CustomError(`could not find location "${name}"`)
    return location
  }
}
@@ -0,0 +1,28 @@
import { Logger } from 'pino'
import { z } from 'zod'
import { LocationSchema } from '../config/schema/location'
import { Log } from '../logger'
import { asArray } from '../utils/array'
import { Context } from './context'
import { execute } from '../restic'

export class Location {
  l: Logger

  constructor(public ctx: Context, public name: string, public data: z.infer<typeof LocationSchema>) {
    this.l = Log.child({ location: name })
  }

  async backup() {
    this.l.trace('backing up location')
    for (const name of asArray(this.data.to)) {
      const repo = this.ctx.getRepo(name)
      this.l.debug(repo.name)
      await execute({
        command: 'restic',
        args: ['backup', '--dry-run'],
        env: repo.env,
      })
    }
  }
}
@@ -0,0 +1,66 @@
import { Logger } from 'pino'
import { z } from 'zod'
import { relativePath } from '../config/resolution'
import { Config } from '../config/schema/config'
import { RepositorySchema } from '../config/schema/repository'
import { ResticError } from '../errors'
import { Log } from '../logger'
import { execute } from '../restic'
import { Context } from './context'

export class Repository {
  l: Logger

  constructor(public ctx: Context, public name: string, public data: z.infer<typeof RepositorySchema>) {
    this.l = Log.child({ repository: this.name })
  }

  get repository(): string {
    switch (this.data.type) {
      case 'local':
        return relativePath(this.ctx.config.meta.path, this.data.path)
      case 'b2':
      case 'azure':
      case 'gs':
      case 's3':
      case 'sftp':
      case 'rclone':
      case 'swift':
      case 'rest':
        return `${this.data.type}:${this.data.path}`
        break
    }
  }

  get env() {
    return {
      ...this.data.env,
      RESTIC_PASSWORD: this.data.key,
      RESTIC_REPOSITORY: this.repository,
    }
  }

  /**
   * Returns true if the repository was initialized,
   * false if it was already initialized.
   */
  async init(): Promise<boolean> {
    this.l.trace('initializing')
    const output = await execute({ command: 'restic', args: ['init'], env: this.env })
    if (!output.ok) {
      if (output.stderr.includes('config file already exists')) {
        this.l.debug('already initialized')
        return false
      }
      throw new ResticError([output.stderr])
    }
    this.l.debug('initialized repository')
    return true
  }

  async check() {
    this.l.trace('checking')
    const output = await execute({ command: 'restic', args: ['check'], env: this.env })
    if (!output.ok) throw new ResticError(['could not check repository', output.stderr])
  }
}
@@ -0,0 +1,44 @@
import { execFile } from 'node:child_process'
import { Log } from '../logger'
import { BinaryNotAvailable } from '../errors'

export type ExecutionContext = {
  command: string
  args?: string[]
  env?: Record<string, string>
}
export async function execute({
  env,
  args,
  command,
}: ExecutionContext): Promise<{ code: number; stderr: string; stdout: string; ok: boolean }> {
  return new Promise((resolve) => {
    execFile(command, args ?? [], { env }, (err, stdout, stderr) => {
      const code = err?.code ?? 0
      resolve({
        code,
        ok: code === 0,
        stderr,
        stdout,
      })
    })
  })
}

export async function isBinaryAvailable(command: string): Promise<boolean> {
  const l = Log.child({ command })
  try {
    l.trace('checking if command is installed')
    const result = await execute({ command })
    return result.ok
  } catch {
    l.trace('not installed')
    return false
  }
}

export async function isResticAvailable() {
  const bin = 'restic'
  const installed = await isBinaryAvailable(bin)
  if (!installed) throw new BinaryNotAvailable(bin)
}
@@ -0,0 +1,12 @@
import { describe, expect, test } from 'bun:test'
import { isSubset } from './array'

describe('set theory', () => {
  test('subset', () => {
    expect(isSubset([1], [1, 2])).toBe(true)
    expect(isSubset([1], [2])).toBe(false)
    expect(isSubset([], [])).toBe(true)
    expect(isSubset([1, 2, 3], [1, 2])).toBe(false)
    expect(isSubset([1, 2], [1, 2])).toBe(true)
  })
})
@@ -0,0 +1,7 @@
export function asArray<T>(singleOrArray: T | T[]): T[] {
  return Array.isArray(singleOrArray) ? singleOrArray : [singleOrArray]
}

export function isSubset<T>(subset: T[], set: T[]): boolean {
  return subset.every((v) => set.includes(v))
}
@@ -0,0 +1,33 @@
import { expect, test, describe } from 'bun:test'
import { setByPath } from './path'

describe('set by path', () => {
  test('simple', () => {
    expect(setByPath({}, 'a', true)).toEqual({ a: true })
    expect(setByPath({}, 'f', { ok: true })).toEqual({ f: { ok: true } })
    expect(setByPath([], '0', true)).toEqual([true])
    expect(setByPath([], '2', false)).toEqual([undefined, undefined, false])
  })

  test('object', () => {
    expect(setByPath({}, 'a.b', true)).toEqual({ a: { b: true } })
    expect(setByPath({}, 'a.b.c', true)).toEqual({ a: { b: { c: true } } })
    expect(setByPath({ a: true }, 'b', false)).toEqual({ a: true, b: false })
    expect(setByPath({ a: { b: true } }, 'a.c', false)).toEqual({ a: { b: true, c: false } })

    expect(() => setByPath({ a: 'foo' }, 'a.b', true)).toThrow()
    expect(setByPath({ a: 'foo' }, 'a', true)).toEqual({ a: true })
  })

  test('array', () => {
    expect(() => setByPath([], 'a', true)).toThrow()
    expect(setByPath([], '0', true)).toEqual([true])
    expect(setByPath([], '0.0.0', true)).toEqual([[[true]]])
    expect(setByPath([], '0.1.2', true)).toEqual([[undefined, [undefined, undefined, true]]])
  })

  test('mixed', () => {
    expect(setByPath({ items: [] }, 'items.0.name', 'John')).toEqual({ items: [{ name: 'John' }] })
    expect(setByPath([], '0.name', 'John')).toEqual([{ name: 'John' }])
  })
})
@@ -0,0 +1,24 @@
function parseKey(key: any) {
  const asNumber = parseInt(key)
  const isString = isNaN(asNumber)
  return [isString ? key : asNumber, isString]
}

export function setByPath(source: object, path: string, value: unknown) {
  const segments = path.split('.')
  const last = segments.length - 1
  let node: any = source
  for (const [i, segment] of segments.entries()) {
    const [key, isString] = parseKey(segment)
    if (Array.isArray(node) && isString) throw new Error(`arrays require a numeric index`)
    if (typeof node !== 'object') throw new Error(`could not set path "${segment}" on ${node}.`)
    if (i === last) {
      node[key] = value
    } else {
      const [_, isNextString] = parseKey(segments[i + 1])
      if (node[key] === undefined) node[key] = isNextString ? {} : []
      node = node[key]
    }
  }
  return source
}
@@ -0,0 +1,3 @@
export function wait(seconds: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, seconds * 1000))
}
@@ -0,0 +1,8 @@
{
  "extends": "./node_modules/bun-types/tsconfig.json",
  "compilerOptions": {
    "strict": true,
    "noFallthroughCasesInSwitch": true,
    "noEmit": true
  }
}