chore: update public repo

This commit is contained in:
nano :3 2025-01-14 02:38:00 +00:00
parent e0109980c0
commit e7c9507247
Signed by: desu-bot
SSH key fingerprint: SHA256:I2g/pbGhrslSQ4yRCMEvP3GKI29uNQ01z5xVb7kz/OI
25 changed files with 5364 additions and 0 deletions

13
.gitignore vendored Normal file
View file

@ -0,0 +1,13 @@
node_modules/
private/
.nyc_output/
**/.DS_Store
.idea
.vscode
*.log
/assets
coverage
.rollup.cache
*.tsbuildinfo
.env

26
LICENSE Normal file
View file

@ -0,0 +1,26 @@
# DON'T BE A DICK PUBLIC LICENSE
> Version 1.1, December 2016
> Copyright (C) 2024 alina sireneva
Everyone is permitted to copy and distribute verbatim or modified
copies of this license document.
> DON'T BE A DICK PUBLIC LICENSE
> TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
1. Do whatever you like with the original work, just don't be a dick.
Being a dick includes - but is not limited to - the following instances:
1a. Outright copyright infringement - Don't just copy this and change the name.
1b. Selling the unmodified original with no work done what-so-ever, that's REALLY being a dick.
1c. Modifying the original work to contain hidden harmful content. That would make you a PROPER dick.
2. If you become rich through modifications, related works/services, or supporting the original work,
share the love. Only a dick would make loads off this work and not buy the original work's
creator(s) a pint.
3. Code is provided with no warranty. Using somebody else's code and bitching when it goes wrong makes
you a DONKEY dick. Fix the problem yourself. A non-dick would submit the fix back.

22
eslint.config.js Normal file
View file

@ -0,0 +1,22 @@
// ESLint flat config built on top of @antfu/eslint-config.
import antfu from '@antfu/eslint-config'
export default antfu({
  // downloaded/generated artifacts live here; don't lint them
  ignores: ['assets/'],
  typescript: true,
  rules: {
    'curly': ['error', 'multi-line'],
    'style/brace-style': ['error', '1tbs', { allowSingleLine: true }],
    'n/prefer-global/buffer': 'off',
    // these globals are banned in favor of ESM-friendly equivalents
    'no-restricted-globals': ['error', 'Buffer', '__dirname', 'require'],
    'style/quotes': ['error', 'single', { avoidEscape: true }],
    'test/consistent-test-it': 'off',
    'test/prefer-lowercase-title': 'off',
    'antfu/if-newline': 'off',
    'style/max-statements-per-line': ['error', { max: 2 }],
    'ts/no-redeclare': 'off',
    // these are CLI scripts, so console/alert usage is expected
    'no-alert': 'off',
    'no-console': 'off',
    'node/prefer-global/process': 'off',
    'unused-imports/no-unused-vars': 'off',
  },
})

38
package.json Normal file
View file

@ -0,0 +1,38 @@
{
"name": "teidesu-scripts",
"type": "module",
"packageManager": "pnpm@9.5.0",
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"@faker-js/faker": "^9.3.0",
"@fuman/io": "^0.0.4",
"@fuman/node": "^0.0.4",
"@mtcute/node": "^0.19.1",
"@types/plist": "^3.0.5",
"cheerio": "^1.0.0",
"es-main": "^1.3.0",
"filesize": "^10.1.6",
"json5": "^2.2.3",
"kuromoji": "^0.1.2",
"nanoid": "^5.0.9",
"plist": "^3.1.0",
"qrcode-terminal": "^0.12.0",
"tough-cookie": "^5.0.0",
"tough-cookie-file-store": "^2.0.3",
"undici": "^7.2.0",
"wanakana": "^5.3.1"
},
"devDependencies": {
"@antfu/eslint-config": "3.10.0",
"@fuman/fetch": "0.0.7",
"@fuman/utils": "0.0.4",
"@types/node": "22.10.0",
"domhandler": "^5.0.3",
"dotenv": "16.4.5",
"htmlparser2": "^10.0.0",
"zod": "3.23.8",
"zx": "8.2.2"
}
}

3931
pnpm-lock.yaml Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,20 @@
import qrTerminal from 'qrcode-terminal'
import { createTg } from '../../utils/telegram.ts'
// Interactive mtcute login helper: renders a QR code in the terminal and
// waits for the user to scan it from an already-logged-in Telegram app.
const sessionName = process.argv[2]
if (!sessionName) {
  console.error('Usage: mtcute-login.ts <session name>')
  process.exit(1)
}
const tg = createTg(sessionName)
const self = await tg.start({
  qrCodeHandler(url, expires) {
    // bug fix: qrTerminal.generate returns nothing — the rendered QR is
    // delivered to the callback, so logging the return value printed
    // "undefined" after the code
    qrTerminal.generate(url, { small: true }, qr => console.log(qr))
  },
})
console.log(`Logged in as ${self.displayName} (${self.id})`)
await tg.close()

View file

@ -0,0 +1,105 @@
import type { NavidromeSong } from '../../utils/navidrome.ts'
import { createRequire } from 'node:module'
import { join } from 'node:path'
import kuromoji from 'kuromoji'
import { isKana, toRomaji } from 'wanakana'
import { fetchSongs, navidromeFfetch as ffetch } from '../../utils/navidrome.ts'
// Keys (JSON-encoded [artist, title] pairs, after `clean`) that are known
// false positives and must never be reported as duplicates.
const WHITELIST_KEYS = new Set([
  // actual different tracks with the same title
  '["sorry about my face","untitled track"]',
  '["kooeetekumogeemusu","neko bushou sengoku emaki"]',
  '["eve","merufuakutorii"]',
  // todo
  '["arm","legend of zelda"]',
  '["arm","tomorrow heart beat ~ ashita anata ni dokkidoki☆ ~"]',
  '["dwat","rotladatormarf"]',
  '["fujiwara mari sai","zenbuatashinokawaiino"]',
])
// Build the kuromoji tokenizer once at startup (callback API wrapped in a
// promise); dicPath resolves the dictionary shipped inside the package.
const moji = await new Promise<any>((resolve, reject) => {
  kuromoji.builder({
    dicPath: join(createRequire(import.meta.url).resolve('kuromoji/'), '../../dict'),
  }).build((err, tokenizer) => {
    if (err) return reject(err)
    resolve(tokenizer)
  })
})
/**
 * Normalize a title/artist string for duplicate matching: lowercase, strip
 * "(Explicit)" and punctuation, and romanize Japanese text via kuromoji.
 */
function clean(s: string) {
  let normalized = s.toLowerCase()
    .replace(/\(Explicit\)/i, '')
    .replace(/[!@#$%^&*()_+=[\]{}\\|/,.;':"<>`~-]/g, '')
  const japaneseRe = /[\u3000-\u303F\u3040-\u309F\u30A0-\u30FF\uFF00-\uFF9F\u4E00-\u9FAF\u3400-\u4DBF]/
  if (!japaneseRe.test(normalized)) return normalized
  // has japanese — romanize token by token
  const parts: string[] = []
  for (const token of moji.tokenize(normalized)) {
    if (token.word_type === 'KNOWN') {
      parts.push(`${toRomaji(token.reading)} `)
    } else if (token.word_type === 'UNKNOWN') {
      parts.push(isKana(token.surface_form) ? toRomaji(token.surface_form) : token.surface_form)
    }
  }
  return parts.join('').trimEnd()
}
const CHUNK_SIZE = 1000
/** Dedup key for a song: JSON of its cleaned [artist, title] pair. */
function getSongKey(song: NavidromeSong) {
  const pair = [clean(song.artist), clean(song.title)]
  return JSON.stringify(pair)
}
// Page through the whole library and bucket songs by normalized key.
const seen = new Map<string, NavidromeSong[]>()
for (let offset = 0; ; offset += CHUNK_SIZE) {
  const songs = await fetchSongs(offset, CHUNK_SIZE)
  if (songs.length === 0) break
  for (const song of songs) {
    const key = getSongKey(song)
    if (WHITELIST_KEYS.has(key)) continue
    let arr = seen.get(key)
    if (!arr) {
      arr = []
      seen.set(key, arr)
    }
    arr.push(song)
  }
  console.log('⌛ fetched chunk %d (%d items)', Math.floor(offset / CHUNK_SIZE), songs.length)
}
// Report every key that maps to more than one song, in sorted order
// for stable output.
const keysSorted = Array.from(seen.keys()).sort()
let duplicates = 0
for (const key of keysSorted) {
  const arr = seen.get(key)!
  if (arr.length === 1) continue
  duplicates += 1
  console.log()
  console.log('found duplicates for %s:', key)
  for (const song of arr) {
    console.log(' %s - %s (from %s - %s) (at %s)', song.artist, song.title, song.albumArtist, song.album, song.path)
  }
}
if (duplicates === 0) {
  console.log('✅ no duplicates found')
} else {
  console.log('🚨 %d duplicates found', duplicates)
}

View file

@ -0,0 +1,66 @@
import { readFile, rm } from 'node:fs/promises'
import { join } from 'node:path'
import { $ } from 'zx'
import { downloadStream } from '../../utils/fetch.ts'
import { getEnv } from '../../utils/misc.ts'
import { fetchSongs } from '../../utils/navidrome.ts'
import { WebdavClient } from '../../utils/webdav.ts'
// Walks the Navidrome library and remuxes flac-in-m4a files to plain .flac
// in place on the WebDAV storage (download → probe → remux → re-upload).
const webdav = new WebdavClient({
  baseUrl: getEnv('NAVIDROME_WEBDAV_ENDPOINT'),
  username: getEnv('NAVIDROME_WEBDAV_USERNAME'),
  password: getEnv('NAVIDROME_WEBDAV_PASSWORD'),
})
const CHUNK_SIZE = 1000
for (let offset = 0; ; offset += CHUNK_SIZE) {
  const songs = await fetchSongs(offset, CHUNK_SIZE)
  if (songs.length === 0) break
  for (const song of songs) {
    const ext = song.path.split('.').pop()!
    if (ext !== 'm4a') continue
    console.log('❌ song %s is m4a, remuxing...', song.path)
    // map library path to the webdav share root
    // NOTE(review): assumes the library lives under /music/s3/ — confirm
    const webdavPath = song.path.replace('/music/s3/', '/')
    const res = await webdav.get(webdavPath).catch(() => null)
    if (!res) {
      console.log(' ❌ failed to get %s', webdavPath)
      continue
    }
    // download to a local scratch file so ffprobe/ffmpeg can read it
    const tmpfile = join('assets', `${song.id}.m4a`)
    await downloadStream(res.body!, tmpfile)
    console.log(' - downloaded to %s', tmpfile)
    const probe = await $`ffprobe -v error -show_entries stream=codec_type,codec_name,index:stream_tags=title,language -of json ${tmpfile}`.json()
    const audioStream = probe.streams.find(stream => stream.codec_type === 'audio')
    if (!audioStream) {
      console.log(' ❌ no audio stream found')
      await rm(tmpfile)
      continue
    }
    const codec = audioStream.codec_name
    // only flac payloads are remuxed; lossy codecs are left as-is
    if (codec !== 'flac') {
      console.log(` ❌ audio stream is ${codec}, not flac, skipping`)
      await rm(tmpfile)
      continue
    }
    console.log(' - audio stream is flac, remuxing')
    // remux
    const remuxed = join('assets', `${song.id}.flac`)
    await rm(remuxed, { force: true })
    // stream copy only — no re-encode
    await $`ffmpeg -i ${tmpfile} -c:a copy ${remuxed}`.quiet(true)
    console.log(' - remuxed to %s', remuxed)
    await rm(tmpfile)
    // upload the replacement first, then delete the original
    await webdav.put(webdavPath.replace('.m4a', '.flac'), await readFile(remuxed))
    await webdav.delete(webdavPath)
    console.log(' - uploaded to %s', webdavPath.replace('.m4a', '.flac'))
    await rm(remuxed)
  }
}

View file

@ -0,0 +1,39 @@
import { filesize } from 'filesize'
import { z } from 'zod'
import { ffetch } from '../../utils/fetch.ts'
import { getEnv } from '../../utils/misc.ts'
// Sums the sizes of all successfully completed uploads reported by a slskd
// instance and prints a human-readable total.
const res = await ffetch('/api/v0/transfers/uploads', {
  baseUrl: getEnv('SLSKD_ENDPOINT'),
  headers: {
    cookie: getEnv('SLSKD_COOKIE'),
  },
}).parsedJson(z.array(
  z.object({
    username: z.string(),
    directories: z.array(z.object({
      directory: z.string(),
      fileCount: z.number(),
      files: z.array(z.object({
        id: z.string(),
        filename: z.string(),
        state: z.string(),
        bytesTransferred: z.number(),
      })),
    })),
  }),
))
// flatten users → directories → files and keep only finished transfers
const total = res
  .flatMap(user => user.directories)
  .flatMap(dir => dir.files)
  .filter(file => file.state === 'Completed, Succeeded')
  .reduce((sum, file) => sum + file.bytesTransferred, 0)
console.log(filesize(total))

View file

@ -0,0 +1,58 @@
import { iter } from '@fuman/utils'
import { z } from 'zod'
import { minimist, question } from 'zx'
import { downloadFile, ffetch } from '../../utils/fetch.ts'
// Search Deezer for tracks matching a query and download the cover art of
// the album the user picks.
const args = minimist(process.argv.slice(2), {
  string: ['filename'],
})
const query = args._[0] ?? await question('Search query (Artist - Album): ')
const data = await ffetch('https://api.deezer.com/search', {
  query: {
    q: query,
    limit: 15,
  },
}).parsedJson(z.object({
  data: z.array(z.object({
    type: z.literal('track'),
    title: z.string(),
    artist: z.object({
      name: z.string(),
    }),
    album: z.object({
      id: z.number(),
      title: z.string(),
      cover_xl: z.string(),
    }),
  })),
}))
// Deezer search returns tracks; group them by album for display.
const groupedByAlbum = new Map<number, typeof data['data']>()
for (const result of data.data) {
  const albumId = result.album.id
  if (!groupedByAlbum.has(albumId)) {
    groupedByAlbum.set(albumId, [])
  }
  groupedByAlbum.get(albumId)!.push(result)
}
// remember which display index maps to which album id
const idxToAlbum = new Map<number, number>()
for (const [idx, [id, tracks]] of iter.enumerate(groupedByAlbum.entries())) {
  idxToAlbum.set(idx, id)
  console.log(`${idx + 1}. ${tracks[0].artist.name} - ${tracks[0].album.title}`)
  for (const track of tracks) {
    console.log(` ${track.title}`)
  }
}
console.log('Enter number to download album art:')
// default to the first album on empty input
const number = Number.parseInt(await question('[1] > ') || '1')
const artworkUrl = groupedByAlbum.get(idxToAlbum.get(number - 1)!)![0].album.cover_xl
await downloadFile(artworkUrl, args.filename ?? `assets/${query.replace(/\s/g, '_')}.jpg`)

View file

@ -0,0 +1,129 @@
import { rm } from 'node:fs/promises'
import { $, question } from 'zx'
import { fileExists } from '../../utils/fs.ts'
// Interactive ffmpeg clip-cutting helper: prompts for the input file,
// start/end timestamps and the output path.
let filename = await question('filename >')!
const startTs = await question('start timestamp >')
const endTs = await question('end timestamp >')
const outputFilename = await question('output filename [output.mp4] >') || 'assets/output.mp4'
// strip surrounding single quotes (as produced by shell drag & drop)
if (filename[0] === '\'' && filename[filename.length - 1] === '\'') {
  filename = filename.slice(1, -1)
}
// bug fix: the command had a garbled "$(unknown)" where the chosen input
// file should be interpolated
const ffprobe = await $`ffprobe -v error -show_entries stream=codec_type,codec_name,index:stream_tags=title,language -of json ${filename}`.json()
/**
 * Prompt the user to pick one stream of the given type.
 * Returns a zero-based index into `options`, or null when the user picks
 * "none" (only offered when `allowNone` is true). Exits on invalid input.
 */
async function chooseStream(type: string, options: any[], allowNone = false) {
  console.log(`Found ${type} streams:`)
  options.forEach((stream, i) => {
    console.log(`[${i + 1}] (${stream.codec_name}, ${stream.tags.language}) ${stream.tags.title}`)
  })
  if (allowNone) console.log(`[0] No ${type}`)
  const answer = await question(`select ${type} >`) || '0'
  if (allowNone && answer === '0') return null
  const picked = Number.parseInt(answer)
  const isValid = !Number.isNaN(picked) && picked >= 1 && picked <= options.length
  if (!isValid) {
    console.error('Invalid input')
    process.exit(1)
  }
  // convert the 1-based menu choice to a 0-based index
  return picked - 1
}
// Partition probed streams by type and let the user pick one of each.
const allVideos = ffprobe.streams.filter(stream => stream.codec_type === 'video')
const allAudios = ffprobe.streams.filter(stream => stream.codec_type === 'audio')
const allSubtitles = ffprobe.streams.filter(stream => stream.codec_type === 'subtitle')
let videoStream: number | null = null
let audioStream: number | null = null
let subtitleStream: number | null = null
if (allVideos.length > 1) {
  videoStream = await chooseStream('video', allVideos)
} else if (allVideos.length > 0) {
  videoStream = 0
} else {
  console.error('No video streams found')
  process.exit(1)
}
if (allAudios.length > 1) {
  audioStream = await chooseStream('audio', allAudios)
} else if (allAudios.length > 0) {
  audioStream = 0
} else {
  console.warn('No audio streams found, proceeding without audio')
}
if (allSubtitles.length > 0) {
  subtitleStream = await chooseStream('subtitle', allSubtitles, true)
}
// Build the ffmpeg argument list.
// bug fix: `-c:v libx264` was passed twice (before and after `-map`);
// both applied to the same output, so the duplicate is removed.
const args: string[] = [
  '-i',
  filename,
  '-map',
  `0:v:${videoStream}`,
  '-c:v',
  'libx264',
]
if (audioStream !== null) {
  args.push('-map', `0:a:${audioStream}`)
}
if (subtitleStream !== null) {
  // burn-in subtitles; single quotes must be escaped for the filter syntax
  const filenameEscaped = filename.replace(/'/g, "'\\\\\\''")
  args.push('-vf', `format=yuv420p,subtitles='${filenameEscaped}':si=${subtitleStream}`)
} else {
  args.push('-vf', 'format=yuv420p')
}
if (audioStream !== null) {
  args.push('-c:a', 'libopus')
  // lossless source → use a high opus bitrate
  if (allAudios[audioStream].codec_name === 'flac') {
    args.push('-b:a', '320k')
  }
}
args.push(
  '-ss',
  startTs!,
  '-to',
  endTs!,
  outputFilename,
)
if (await fileExists(outputFilename)) {
  const overwrite = await question('Output file already exists, overwrite? [y/N] >')
  if (overwrite?.toLowerCase() !== 'y') {
    process.exit(0)
  }
  await rm(outputFilename)
}
try {
  // keep colored ffmpeg output even though stdio is piped
  $.env.AV_LOG_FORCE_COLOR = 'true'
  await $`ffmpeg ${args}`
} catch (e) {
  // ffmpeg already printed its error; just propagate the failure
  process.exit(1)
}
const openDir = await question('open output directory? [Y/n] >')
if (!openDir || openDir?.toLowerCase() === 'y') {
  await $`open -R ${outputFilename}`
}

View file

@ -0,0 +1,46 @@
import { iter } from '@fuman/utils'
import { z } from 'zod'
import { minimist, question } from 'zx'
import { downloadFile, ffetch } from '../../utils/fetch.ts'
// Search the iTunes store and download high-resolution album art for the
// result the user picks.
const args = minimist(process.argv.slice(2), {
  string: ['entity', 'filename'],
})
// --entity allows searching songs instead of albums
const entity = args.entity ?? 'album'
const query = args._[0] ?? await question('Search query (Artist - Album): ')
const data = await ffetch('https://itunes.apple.com/search', {
  query: {
    term: query,
    entity,
    limit: 15,
  },
}).parsedJson(z.object({
  results: z.array(z.object({
    kind: z.literal('song').optional(),
    artistName: z.string(),
    collectionName: z.string(),
    artworkUrl100: z.string(),
    releaseDate: z.string(),
    trackName: z.string().optional(),
  }).passthrough()),
}))
for (const [i, result] of iter.enumerate(data.results)) {
  // song results additionally show the track name
  if (result.kind === 'song') {
    console.log(`${i + 1}. ${result.artistName} - ${result.trackName} (${result.collectionName}, ${new Date(result.releaseDate).toLocaleDateString('ru-RU')})`)
    continue
  }
  console.log(`${i + 1}. ${result.artistName} - ${result.collectionName} (${new Date(result.releaseDate).toLocaleDateString('ru-RU')})`)
}
console.log('Enter number to download album art:')
// default to the first result on empty input
const number = Number.parseInt(await question('[1] > ') || '1')
// iTunes serves arbitrary sizes by rewriting the dimensions in the URL
const artworkUrl = data.results[number - 1].artworkUrl100.replace('100x100', '1500x1500')
await downloadFile(artworkUrl, args.filename ?? `assets/${query.replace(/\s/g, '_')}.jpg`)

View file

@ -0,0 +1,63 @@
import { iter } from '@fuman/utils'
import { z } from 'zod'
import { minimist, question } from 'zx'
import { downloadFile, ffetch } from '../../utils/fetch.ts'
// Search iTunes for an artist and scrape their Apple Music page for the
// full-size artist artwork (the search API has no artist images).
const args = minimist(process.argv.slice(2), {
  string: ['filename'],
})
const query = args._[0] ?? await question('Search query: ')
const data = await ffetch('https://itunes.apple.com/search', {
  query: {
    term: query,
    entity: 'musicArtist',
    limit: 15,
  },
}).parsedJson(z.object({
  results: z.array(z.object({
    wrapperType: z.literal('artist'),
    artistName: z.string(),
    artistLinkUrl: z.string(),
    primaryGenreName: z.string().default('Unknown'),
  }).passthrough()),
}))
for (const [i, result] of iter.enumerate(data.results)) {
  console.log(`${i + 1}. ${result.artistName} (${result.primaryGenreName})`)
  continue
}
console.log('Enter number to download artist art:')
const number = Number.parseInt(await question('[1] > ') || '1')
const pageUrl = data.results[number - 1].artistLinkUrl
const $ = await ffetch(pageUrl).cheerio()
// Apple Music embeds page state as JSON inside a <script> tag
const pageData = JSON.parse($('#serialized-server-data').html()!)
const pageDataValidated = z.tuple([
  z.object({
    data: z.object({
      seoData: z.object({
        artworkUrl: z.string(),
      }),
    }),
  }),
]).parse(pageData)
// the artwork url is a template: {w}x{h}{c}.{f}
const artworkUrl = pageDataValidated[0].data.seoData.artworkUrl
  .replace('{w}', '2500')
  .replace('{h}', '2500')
  .replace('{c}', 'cc')
  .replace('{f}', 'jpg')
// placeholder image returned when the artist has no artwork
if (artworkUrl === '/assets/meta/apple-music.png') {
  console.log('No artwork available')
  process.exit(1)
}
await downloadFile(artworkUrl, args.filename ?? `assets/${query.replace(/\s/g, '_')}.jpg`)

View file

@ -0,0 +1,51 @@
import { readFile } from 'node:fs/promises'
import { join } from 'node:path'
import plist from 'plist'
import { z } from 'zod'
import { $ } from 'zx'
import { ffetch } from '../../utils/fetch.ts'
// Self-updater for Forkgram (Telegram fork) on macOS arm64: compares the
// installed bundle version against the latest GitHub release and swaps the
// app bundle in place when outdated.
const latestVerInfo = await ffetch('https://api.github.com/repos/forkgram/tdesktop/releases/latest').parsedJson(
  z.object({
    // strip the leading "v" so it compares against CFBundleShortVersionString
    tag_name: z.string().transform(v => v.replace(/^v/, '')),
    assets: z.array(z.object({
      name: z.string(),
      browser_download_url: z.string(),
    })),
  }),
)
const INSTALL_PATH = '/Applications/Forkgram.app'
console.log('latest version:', latestVerInfo.tag_name)
// read the installed version from the app bundle's Info.plist
const installedPlist = await readFile(join(INSTALL_PATH, 'Contents/Info.plist'), 'utf8')
const installedPlistParsed = z.object({
  CFBundleShortVersionString: z.string(),
}).parse(plist.parse(installedPlist))
console.log('installed version:', installedPlistParsed.CFBundleShortVersionString)
if (installedPlistParsed.CFBundleShortVersionString === latestVerInfo.tag_name) {
  console.log('✅ no update needed')
  process.exit(0)
}
const arm64Asset = latestVerInfo.assets.find(asset => asset.name === 'Forkgram.macOS.no.auto-update_arm64.zip')
if (!arm64Asset) {
  console.error('❌ no arm64 asset found')
  process.exit(1)
}
console.log('installing new version...')
await $`curl -L ${arm64Asset.browser_download_url} -o /tmp/forkgram.zip`
await $`unzip -o /tmp/forkgram.zip -d /tmp/forkgram`
// kill the running app before replacing its bundle
await $`kill -9 $(pgrep -f /Applications/Forkgram.app/Contents/MacOS/Telegram)`
await $`rm -rf ${INSTALL_PATH}`
await $`mv /tmp/forkgram/Telegram.app ${INSTALL_PATH}`
await $`rm -rf /tmp/forkgram`
// drop the quarantine attribute so Gatekeeper doesn't block the unsigned build
await $`xattr -cr ${INSTALL_PATH}`
await $`open ${INSTALL_PATH}`
console.log('✅ done')

26
tsconfig.json Normal file
View file

@ -0,0 +1,26 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["ESNext", "DOM"],
"moduleDetection": "force",
"module": "ESNext",
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"allowJs": true,
// Best practices
"strict": true,
"noFallthroughCasesInSwitch": true,
"noImplicitAny": false,
"noPropertyAccessFromIndexSignature": false,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noEmit": true,
"verbatimModuleSyntax": true,
"skipLibCheck": true
}
}

87
utils/captcha.ts Normal file
View file

@ -0,0 +1,87 @@
import { sleep } from '@fuman/utils'
import { z } from 'zod'
import { ffetch } from './fetch.ts'
import { getEnv } from './misc.ts'
// Response schema for capmonster's createTask endpoint.
const CreateTaskResponse = z.object({
  errorId: z.number(),
  errorCode: z.string().optional().nullable(),
  taskId: z.number(),
})
// Response schema for getTaskResult; the `solution` shape depends on the
// task type and is validated by the caller.
const GetTaskResultResponse = z.object({
  errorId: z.number(),
  errorCode: z.string().optional().nullable(),
  status: z.enum(['ready', 'processing']),
  solution: z.unknown().optional(),
})
/**
 * Submit a task to CapMonster and poll until a solution is available.
 * Throws on API errors or when the polling limit is exceeded.
 */
export async function solveCaptcha(task: unknown) {
  const created = await ffetch.post('https://api.capmonster.cloud/createTask', {
    json: {
      clientKey: getEnv('CAPMONSTER_API_TOKEN'),
      task,
    },
  }).parsedJson(CreateTaskResponse)
  if (created.errorId) {
    throw new Error(`createTask error ${created.errorId}: ${created.errorCode}`)
  }
  const taskId = created.taskId
  // give the solver a head start before the first poll
  await sleep(5_000)
  let attempts = 0
  while (true) {
    attempts += 1
    if (attempts > 100) {
      // "Limit: 120 requests per task. If the limit is exceeded, the user's account may be temporarily locked."
      // just to be safe
      throw new Error('captcha request count exceeded')
    }
    const result = await ffetch.post('https://api.capmonster.cloud/getTaskResult', {
      json: {
        clientKey: getEnv('CAPMONSTER_API_TOKEN'),
        taskId,
      },
    }).parsedJson(GetTaskResultResponse)
    if (result.errorId) {
      throw new Error(`getTaskResult error ${result.errorId}: ${result.errorCode}`)
    }
    if (result.status === 'ready') {
      return result.solution
    }
    await sleep(2_000)
  }
}
/**
 * Solve a reCAPTCHA v2 challenge via CapMonster's proxyless task type.
 * Returns the g-recaptcha-response token string.
 */
export async function solveRecaptcha(params?: {
  url: string
  siteKey: string
  s?: string
  userAgent?: string
  cookies?: string
  isInvisible?: boolean
}) {
  const solution = await solveCaptcha({
    type: 'RecaptchaV2TaskProxyless',
    websiteURL: params?.url,
    websiteKey: params?.siteKey,
    recaptchaDataSValue: params?.s,
    userAgent: params?.userAgent,
    cookies: params?.cookies,
    isInvisible: params?.isInvisible,
  })
  // the solution shape is not statically known; verify before returning
  if (typeof solution !== 'object' || solution === null || !('gRecaptchaResponse' in solution) || typeof solution.gRecaptchaResponse !== 'string') {
    throw new Error('invalid recaptcha response')
  }
  return solution.gRecaptchaResponse
}

113
utils/currency.ts Normal file
View file

@ -0,0 +1,113 @@
import { asyncPool } from '@fuman/utils'
import { z } from 'zod'
import { ffetch } from './fetch.ts'
import { getEnv } from './misc.ts'
// token management
const TOKENS = getEnv('OXR_TOKENS').split(',')
// api token => requests remaining
const usageAvailable = new Map<string, number>()
// Pick a token that still has quota and consume one request from it.
function getToken() {
  // find token with the most requests remaining
  // NOTE(review): `find` actually returns the FIRST token with quota, not
  // the one with the most remaining — comment and code disagree; confirm intent
  const token = TOKENS.find(t => usageAvailable.get(t)! > 0)
  if (!token) throw new Error('no tokens available')
  // consume 1 request
  usageAvailable.set(token, usageAvailable.get(token)! - 1)
  return token
}
// base => other => value
// NB: ideally we should have expiration and persistence on this
const data = new Map<string, Record<string, number>>()
/**
 * Ensure exchange rates are cached for every pair in `list`.
 * A pair is resolvable when rates exist for either side (direct or reverse
 * lookup), so only pairs where neither side is cached trigger a fetch.
 */
async function fetchMissingPairs(list: { from: string, to: string }[]) {
  const missing = list.filter(c => !data.has(c.from) && !data.has(c.to) && c.from !== c.to)
  if (missing.length === 0) return
  // fetch at most one base per missing pair
  const basesToFetch = new Set<string>()
  for (const { from, to } of missing) {
    if (!basesToFetch.has(from) && !basesToFetch.has(to)) {
      basesToFetch.add(from)
    }
  }
  if (!usageAvailable.size) {
    // NB: ideally we should lock here for a production-ready implementation
    // fetch usage for all tokens
    await asyncPool(TOKENS, async (token) => {
      const res = await ffetch('https://openexchangerates.org/api/usage.json', {
        query: {
          app_id: token,
        },
      }).parsedJson(z.object({
        status: z.literal(200),
        data: z.object({
          app_id: z.string(),
          status: z.literal('active'),
          usage: z.object({
            requests_remaining: z.number(),
          }),
        }),
      }))
      usageAvailable.set(token, res.data.usage.requests_remaining)
    }, { onError: () => 'ignore' })
    if (!usageAvailable.size) {
      throw new Error('failed to fetch usage, are all tokens dead?')
    }
  }
  // console.log('will fetch bases:', [...basesToFetch])
  await asyncPool(basesToFetch, async (base) => {
    const res = await ffetch('https://openexchangerates.org/api/latest.json', {
      query: {
        app_id: getToken(),
        // bug fix: without `base`, latest.json always returns USD-based
        // rates, which were then cached under a non-USD key and produced
        // wrong conversions.
        // NOTE(review): the `base` parameter requires a paid OXR plan —
        // confirm the account tier supports it.
        base,
      },
    }).parsedJson(z.object({
      rates: z.record(z.string(), z.number()),
    }))
    data.set(base, res.rates)
  })
}
/**
 * Convert a batch of amounts between currencies using cached OXR rates.
 * Uses the direct rate when the `from` base is cached, or divides by the
 * reverse rate when only the `to` base is cached. Throws when neither
 * direction is available.
 */
export async function convertCurrenciesBatch(list: { from: string, to: string, amount: number }[]) {
  await fetchMissingPairs(list)
  const ret: { from: string, to: string, amount: number, converted: number }[] = []
  for (const { from, to, amount } of list) {
    let converted: number
    const direct = data.get(from)
    const reverse = data.get(to)
    if (from === to) {
      converted = amount
    } else if (direct) {
      const rate = direct[to]!
      if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
      converted = amount * rate
    } else if (reverse) {
      const rate = reverse[from]!
      if (!rate) throw new Error(`rate unavailable: ${from} -> ${to}`)
      converted = amount / rate
    } else {
      throw new Error(`rate unavailable: ${from} -> ${to}`)
    }
    ret.push({ from, to, amount, converted })
  }
  return ret
}

37
utils/fetch.ts Normal file
View file

@ -0,0 +1,37 @@
import { createWriteStream } from 'node:fs'
import { type FfetchAddon, ffetchAddons, ffetchBase, type FfetchResultInternals } from '@fuman/fetch'
import { toughCookieAddon } from '@fuman/fetch/tough'
import { ffetchZodAdapter } from '@fuman/fetch/zod'
import { webReadableToFuman, write } from '@fuman/io'
import { nodeWritableToFuman } from '@fuman/node'
import { type CheerioAPI, load } from 'cheerio'
// ffetch addon adding a `.cheerio()` response helper that parses the body
// as HTML and returns a cheerio root.
const cheerioAddon: FfetchAddon<object, { cheerio: () => Promise<CheerioAPI> }> = {
  response: {
    async cheerio(this: FfetchResultInternals<object>) {
      // presumably the request is only dispatched when the body is awaited,
      // so defaulting the Accept header here still takes effect — confirm
      // against @fuman/fetch internals
      this._headers ??= {}
      this._headers.Accept ??= 'text/html; charset=utf-8'
      return load(await this.text())
    },
  },
}
// Shared ffetch instance: zod-validated .parsedJson(), .cheerio(), and a
// tough-cookie jar.
export const ffetch = ffetchBase.extend({
  addons: [
    ffetchAddons.parser(ffetchZodAdapter()),
    cheerioAddon,
    toughCookieAddon(),
  ],
})
/**
 * Pipe a web ReadableStream to a file on disk.
 * bug fix: the sink is now closed in a finally block so the file descriptor
 * is not leaked when the pipe fails mid-transfer.
 */
export async function downloadStream(stream: ReadableStream, path: string) {
  const file = nodeWritableToFuman(createWriteStream(path))
  try {
    await write.pipe(file, webReadableToFuman(stream))
  } finally {
    file.close()
  }
}
/** Download a URL to a local file; `extra` is forwarded to ffetch. */
export async function downloadFile(url: string, path: string, extra?: Parameters<typeof ffetch>[1]) {
  const body = await ffetch(url, extra).stream()
  await downloadStream(body, path)
}

19
utils/fs.ts Normal file
View file

@ -0,0 +1,19 @@
import * as fsp from 'node:fs/promises'
/** True when `path` exists and is a regular file; false otherwise
 * (including when stat fails, e.g. on a missing path). */
export async function fileExists(path: string): Promise<boolean> {
  try {
    return (await fsp.stat(path)).isFile()
  } catch {
    return false
  }
}
/** True when `path` exists and is a directory; false otherwise
 * (including when stat fails, e.g. on a missing path). */
export async function directoryExists(path: string): Promise<boolean> {
  try {
    return (await fsp.stat(path)).isDirectory()
  } catch {
    return false
  }
}

10
utils/misc.ts Normal file
View file

@ -0,0 +1,10 @@
import 'dotenv/config'
/**
 * Read a required environment variable, throwing when it is missing or empty.
 * The optional `parser` overload converts the raw string to another type.
 */
export function getEnv(key: string): string
export function getEnv<T>(key: string, parser: (value: string) => T): T
export function getEnv<T>(key: string, parser?: (value: string) => T): T | string {
  const value = process.env[key]
  if (!value) throw new Error(`env variable ${key} not found`)
  return parser ? parser(value) : value
}

32
utils/navidrome.ts Normal file
View file

@ -0,0 +1,32 @@
import { z } from 'zod'
import { ffetch as ffetchBase } from './fetch.ts'
import { getEnv } from './misc.ts'
// ffetch instance pre-configured for the Navidrome REST API.
export const navidromeFfetch = ffetchBase.extend({
  baseUrl: getEnv('NAVIDROME_ENDPOINT'),
  headers: {
    'x-nd-authorization': `Bearer ${getEnv('NAVIDROME_TOKEN')}`,
  },
})
// Subset of the Navidrome song object that the scripts actually use.
export const NavidromeSong = z.object({
  id: z.string(),
  title: z.string(),
  album: z.string(),
  albumArtist: z.string(),
  artist: z.string(),
  path: z.string(),
  duration: z.number(),
})
export type NavidromeSong = z.infer<typeof NavidromeSong>
/** Fetch one page of songs ([offset, offset + pageSize)), sorted by title. */
export function fetchSongs(offset: number, pageSize: number) {
  return navidromeFfetch('/api/song', {
    query: {
      _start: offset,
      _end: offset + pageSize,
      _order: 'ASC',
      _sort: 'title',
    },
  }).parsedJson(z.array(NavidromeSong))
}

78
utils/oauth.ts Normal file
View file

@ -0,0 +1,78 @@
import type { MaybePromise } from '@fuman/utils'
import * as fsp from 'node:fs/promises'
import { z } from 'zod'
/** Pluggable persistence for serialized OAuth state. */
export interface OauthStorage {
  write: (value: string) => MaybePromise<void>
  read: () => MaybePromise<string | null>
}
/** OauthStorage backed by a single local file. */
export class LocalOauthStorage implements OauthStorage {
  constructor(private filename: string) {}
  /** Persist the serialized state, replacing any previous contents. */
  async write(value: string) {
    await fsp.writeFile(this.filename, value)
  }
  /** Returns the stored state, or null when the file is missing/unreadable. */
  async read() {
    try {
      return await fsp.readFile(this.filename, 'utf8')
    } catch {
      return null
    }
  }
}
// Persisted token state; expiresAt is a unix timestamp in milliseconds
// (it is compared against Date.now() in getAccessToken).
const OauthState = z.object({
  accessToken: z.string(),
  refreshToken: z.string().optional(),
  expiresAt: z.number(),
})
type OauthState = z.infer<typeof OauthState>
/**
 * Manages an OAuth access/refresh token pair persisted via OauthStorage,
 * transparently refreshing the access token shortly before it expires.
 */
export class OauthHandler {
  // milliseconds subtracted from the expiry so we refresh slightly early
  readonly #jitter: number
  #cache: OauthState | null = null
  constructor(private params: {
    storage: OauthStorage
    refreshToken: (refreshToken: string) => MaybePromise<{
      accessToken: string
      refreshToken: string
      expiresIn: number
    }>
    /** number of milliseconds to subtract from token expiration time */
    jitter?: number
  }) {
    // store the default locally instead of mutating the caller's object
    this.#jitter = params.jitter ?? 5000
  }
  /** Load the persisted state (memoized after the first successful read). */
  async readOauthState() {
    if (this.#cache) return this.#cache
    const value = await this.params.storage.read()
    if (!value) return null
    // bug fix: the parsed state was previously not cached, so every call
    // hit storage until the first write
    this.#cache = OauthState.parse(JSON.parse(value))
    return this.#cache
  }
  /** Persist new state and update the in-memory cache. */
  async writeOauthState(value: OauthState) {
    this.#cache = value
    await this.params.storage.write(JSON.stringify(value))
  }
  /**
   * Returns a valid access token, refreshing it when (nearly) expired.
   * Returns null when no state or no refresh token is available.
   */
  async getAccessToken() {
    const state = await this.readOauthState()
    if (!state) return null
    if (state.expiresAt < Date.now() + this.#jitter) {
      if (!state.refreshToken) return null
      const { accessToken, refreshToken, expiresIn } = await this.params.refreshToken(state.refreshToken)
      await this.writeOauthState({
        accessToken,
        refreshToken,
        expiresAt: Date.now() + expiresIn * 1000,
      })
      return accessToken
    }
    return state.accessToken
  }
}

11
utils/telegram.ts Normal file
View file

@ -0,0 +1,11 @@
import { TelegramClient, type TelegramClientOptions } from '@mtcute/node'
import { getEnv } from './misc.ts'
export function createTg(session: string, extra?: Partial<TelegramClientOptions>) {
return new TelegramClient({
apiId: getEnv('TELEGRAM_API_ID', Number),
apiHash: getEnv('TELEGRAM_API_HASH'),
storage: `assets/${session}.session`,
...extra,
})
}

324
utils/webdav.ts Normal file
View file

@ -0,0 +1,324 @@
import { ffetchBase, type FfetchResult } from '@fuman/fetch'
import { asNonNull, assert, base64, utf8 } from '@fuman/utils'
import { Parser } from 'htmlparser2'
import { z } from 'zod'
const XML_HEADER = '<?xml version="1.0" encoding="utf-8" ?>'
/** Options for WebdavClient; username/password are sent as HTTP Basic auth. */
export interface WebdavClientOptions {
  baseUrl: string
  username?: string
  password?: string
  headers?: Record<string, string>
}
/** Fields common to every PROPFIND result entry. */
export interface WebdavResourceBase {
  href: string
  name: string
  status: string
  lastModified?: Date
  // raw parsed d:prop values, for properties not mapped to typed fields
  raw: Record<string, unknown>
  // todo: lockdiscovery
  // todo: supportedlock
}
export interface WebdavCollection extends WebdavResourceBase {
  type: 'collection'
}
export interface WebdavFile extends WebdavResourceBase {
  type: 'file'
  size: number
  etag?: string
  contentType?: string
}
export type WebdavResource = WebdavCollection | WebdavFile
// Schema for one <d:response> element as produced by xmlToJson.
// Empty XML elements are represented as the literal `true` (see xmlToJson).
const DResponseSchema = z.object({
  'd:href': z.string(),
  'd:propstat': z.object({
    'd:prop': z.object({
      'd:resourcetype': z.union([
        z.literal(true),
        z.object({
          'd:collection': z.literal(true),
        }),
      ]),
      'd:displayname': z.union([z.literal(true), z.string()]),
      'd:getcontentlength': z.coerce.number().optional(),
      'd:getlastmodified': z.string().transform(v => new Date(v)).optional(),
      'd:getetag': z.string().optional(),
      'd:getcontenttype': z.string().optional(),
    }).passthrough(),
    'd:status': z.string(),
  }),
})
// Top-level multistatus envelope of a PROPFIND response.
const DMultistatusSchema = z.object({
  'd:multistatus': z.tuple([z.object({
    'd:response': z.array(DResponseSchema),
  })]),
})
/**
 * Escape XML special characters in a text node.
 * bug fix: the original escaped only `<` and `>`; `&` must be escaped too
 * (and first, so already-escaped entities are not double-mangled).
 */
function escapeXml(str: string) {
  return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;')
}
/**
 * Minimal XML → JS object converter for WebDAV multistatus responses.
 * Elements become objects keyed by lowercased tag name; text-only elements
 * collapse to their string content and empty elements to `true`. Repeated
 * tags accumulate into arrays; single-element arrays are unwrapped, except
 * `d:response`, which always stays an array so callers can iterate it.
 */
function xmlToJson(xml: string) {
  const res: Record<string, any[]> = {}
  // stack of currently-open elements; the root accumulator sits at the bottom
  const stack: any[] = [res]
  const parser = new Parser({
    onopentag(name) {
      name = name.toLowerCase()
      const node: any = {}
      const top = stack[stack.length - 1]
      if (!top[name]) {
        top[name] = []
      }
      top[name].push(node)
      stack.push(node)
    },
    onclosetag(name) {
      const obj = stack.pop()
      const top = stack[stack.length - 1]
      const ourIdx = top[name].length - 1
      const keys = Object.keys(obj)
      if (keys.length === 1 && keys[0] === '_text') {
        // text-only element → plain string
        top[name][ourIdx] = obj._text
      } else if (keys.length === 0) {
        // empty element → boolean marker
        top[name][ourIdx] = true
      } else {
        // replace one-element arrays with the element itself
        for (const key of keys) {
          if (key === 'd:response') continue
          const val = obj[key]
          if (Array.isArray(val) && val.length === 1) {
            obj[key] = val[0]
          }
        }
      }
    },
    ontext(text) {
      // accumulate character data on the innermost open element
      const top = stack[stack.length - 1]
      if (top._text === undefined) {
        top._text = ''
      }
      top._text += text
    },
  })
  parser.write(xml)
  parser.end()
  return res
}
/**
 * A small WebDAV (RFC 4918) client.
 *
 * All request paths are resolved against `options.baseUrl`; hrefs returned
 * by {@link propfind} are normalized to be relative to it.
 */
export class WebdavClient {
  readonly ffetch: typeof ffetchBase
  /** pathname of the base url, normalized to end with a trailing slash */
  readonly basePath

  constructor(options: WebdavClientOptions) {
    const headers: Record<string, string> = {
      'Content-Type': 'application/xml; charset="utf-8"',
      ...options.headers,
    }
    if (options.username) {
      // HTTP Basic auth; password is optional
      let authStr = options.username
      if (options.password) {
        authStr += `:${options.password}`
      }
      headers.Authorization = `Basic ${base64.encode(utf8.encoder.encode(authStr))}`
    }
    this.ffetch = ffetchBase.extend({
      baseUrl: options.baseUrl,
      headers,
    })
    this.basePath = new URL(options.baseUrl).pathname
    if (this.basePath[this.basePath.length - 1] !== '/') {
      this.basePath += '/'
    }
  }

  /** maps one parsed `d:response` element onto a {@link WebdavResource} */
  mapPropfindResponse = (obj: z.infer<typeof DResponseSchema>): WebdavResource => {
    const name = obj['d:propstat']['d:prop']['d:displayname']
    const base: WebdavResourceBase = {
      href: obj['d:href'],
      // an empty <d:displayname/> is collapsed to `true` by xmlToJson
      name: name === true ? '' : name,
      status: obj['d:propstat']['d:status'],
      lastModified: obj['d:propstat']['d:prop']['d:getlastmodified'],
      raw: obj['d:propstat']['d:prop'],
    }
    // make hrefs relative to the base path, keeping a leading slash
    if (base.href.startsWith(this.basePath)) {
      base.href = base.href.slice(this.basePath.length)
      if (base.href !== '/') {
        base.href = `/${base.href}`
      }
    }
    if (typeof obj['d:propstat']['d:prop']['d:resourcetype'] === 'object' && obj['d:propstat']['d:prop']['d:resourcetype']['d:collection']) {
      const res = base as WebdavCollection
      res.type = 'collection'
      return res
    } else {
      const res = base as WebdavFile
      res.type = 'file'
      // content length is required for file resources
      res.size = asNonNull(obj['d:propstat']['d:prop']['d:getcontentlength'])
      res.etag = obj['d:propstat']['d:prop']['d:getetag']
      res.contentType = obj['d:propstat']['d:prop']['d:getcontenttype']
      return res
    }
  }

  /**
   * Lists properties of a resource (PROPFIND).
   *
   * @param path  resource path, relative to the base url
   * @param params.depth  Depth header value (0, 1 or 'infinity'); defaults to 1
   * @param params.properties  specific properties to request (e.g.
   *   `d:getetag`); when omitted the server returns its default set
   */
  async propfind(
    path: string,
    params?: {
      depth?: number | 'infinity'
      properties?: string[]
    },
  ): Promise<WebdavResource[]> {
    const body = params?.properties
      ? [
          XML_HEADER,
          // fix: the declared prefix must match the `d:` used on the
          // elements — XML namespace prefixes are case-sensitive, so
          // `xmlns:D` would leave `d:` unbound
          '<d:propfind xmlns:d="DAV:">',
          '<d:prop>',
          ...params.properties.map(prop => `<${prop}/>`),
          '</d:prop>',
          '</d:propfind>',
        ].join('\n')
      : undefined
    const res = await this.ffetch(path, {
      method: 'PROPFIND',
      headers: {
        // fix: `Depth: 0` is valid — don't treat it as unset (0 is falsy)
        Depth: params?.depth !== undefined ? String(params.depth) : '1',
      },
      body,
    }).text()
    const json = DMultistatusSchema.parse(xmlToJson(res))
    return json['d:multistatus'][0]['d:response'].map(this.mapPropfindResponse)
  }

  /**
   * Sets and/or removes properties of a resource (PROPPATCH).
   *
   * @param params.set  properties to set; plain string values are
   *   XML-escaped, `{ _xml }` values are inserted verbatim
   * @param params.remove  property names to remove
   */
  async proppatch(path: string, params: {
    set?: Record<string, string | { _xml: string }>
    remove?: string[]
  }): Promise<void> {
    // nothing to do
    if (!params.set && !params.remove) return
    const lines: string[] = [
      XML_HEADER,
      // fix: prefix must match the `d:` elements below (case-sensitive)
      '<d:propertyupdate xmlns:d="DAV:">',
    ]
    if (params.set) {
      lines.push('<d:set>')
      for (const [key, value] of Object.entries(params.set)) {
        lines.push(`<d:prop><${key}>${
          typeof value === 'object' ? value._xml : escapeXml(value)
        }</${key}></d:prop>`)
      }
      lines.push('</d:set>')
    }
    if (params.remove) {
      lines.push('<d:remove>')
      for (const key of params.remove) {
        lines.push(`<d:prop><${key}/></d:prop>`)
      }
      lines.push('</d:remove>')
    }
    lines.push('</d:propertyupdate>')
    const body = lines.join('\n')
    await this.ffetch(path, {
      method: 'PROPPATCH',
      body,
    })
  }

  /** Creates a collection (MKCOL). Throws unless the server returns 201. */
  async mkcol(path: string): Promise<void> {
    const res = await this.ffetch(path, {
      method: 'MKCOL',
    })
    if (res.status !== 201) throw new Error(`mkcol failed: ${res.status}`)
  }

  /** Deletes a resource (DELETE). Throws unless the server returns 204. */
  async delete(path: string): Promise<void> {
    const res = await this.ffetch(path, {
      method: 'DELETE',
    })
    if (res.status !== 204) throw new Error(`delete failed: ${res.status}`)
  }

  /** Downloads a resource (GET). */
  get(path: string): FfetchResult {
    return this.ffetch(path, {
      method: 'GET',
    })
  }

  /** Uploads a resource (PUT). */
  async put(path: string, body: BodyInit): Promise<void> {
    await this.ffetch(path, {
      method: 'PUT',
      body,
    })
  }

  /**
   * Copies a resource (COPY).
   *
   * @param source  path of the resource to copy
   * @param destination  destination path, relative to the base url
   */
  async copy(
    source: string,
    destination: string,
    params?: {
      /** whether to overwrite the destination if it exists */
      overwrite?: boolean
      depth?: number | 'infinity'
    },
  ): Promise<void> {
    // Destination header wants a path relative to the server root
    if (destination[0] === '/') destination = destination.slice(1)
    if (this.basePath) destination = this.basePath + destination
    const headers: Record<string, string> = {
      Destination: destination,
    }
    if (params?.overwrite !== true) {
      headers.Overwrite = 'F'
    }
    // fix: `Depth: 0` is valid (copy a collection without its members)
    if (params?.depth !== undefined) {
      headers.Depth = String(params.depth)
    }
    const res = await this.ffetch(source, {
      method: 'COPY',
      headers,
    })
    // fix: RFC 4918 returns 204 when an existing destination is overwritten
    if (res.status !== 201 && res.status !== 204) throw new Error(`copy failed: ${res.status}`)
  }

  /**
   * Moves a resource (MOVE).
   *
   * @param source  path of the resource to move
   * @param destination  destination path, relative to the base url
   */
  async move(
    source: string,
    destination: string,
    params?: {
      /** whether to overwrite the destination if it exists */
      overwrite?: boolean
      depth?: number | 'infinity'
    },
  ): Promise<void> {
    // Destination header wants a path relative to the server root
    if (destination[0] === '/') destination = destination.slice(1)
    if (this.basePath) destination = this.basePath + destination
    const headers: Record<string, string> = {
      Destination: destination,
    }
    if (params?.overwrite !== true) {
      headers.Overwrite = 'F'
    }
    // fix: don't drop an explicit `Depth: 0` (falsy but valid)
    if (params?.depth !== undefined) {
      headers.Depth = String(params.depth)
    }
    const res = await this.ffetch(source, {
      method: 'MOVE',
      headers,
    })
    // fix: RFC 4918 returns 204 when an existing destination is overwritten
    if (res.status !== 201 && res.status !== 204) throw new Error(`move failed: ${res.status}`)
  }
}

20
utils/xml.ts Normal file
View file

@ -0,0 +1,20 @@
import type { ChildNode } from 'domhandler'
import { DomHandler } from 'domhandler'
import { Parser } from 'htmlparser2'
/**
 * Parses an XML string into a DOM tree (list of top-level child nodes).
 *
 * NOTE(review): relies on the DomHandler callback firing synchronously
 * from `parser.end()` so the result is available right after parsing —
 * presumably true for htmlparser2's non-streaming use; verify.
 *
 * @throws the parser error, if one was reported
 */
export function xmlToDom(xml: string) {
  let parseError: Error | null = null
  let nodes: ChildNode[] | null = null
  const handler = new DomHandler((error, dom) => {
    parseError = error
    nodes = dom
  })
  const parser = new Parser(handler)
  parser.write(xml)
  parser.end()
  if (parseError) throw parseError
  return nodes!
}