import fs from 'fs/promises'
import crypto from 'crypto'
import path from 'path'
import bencode from 'bencode'

/*
 * Adapted from the parse-torrent package.
 */

/**
|
|
* Parse a torrent. Throws an exception if the torrent is missing required fields.
|
|
* @param {Buffer|Object} torrent
|
|
* @return {Object} parsed torrent
|
|
*/
|
|
export async function decodeTorrentFile (file) {
|
|
let buffer = await fs.readFile(file)
|
|
let torrent = bencode.decode(buffer)
|
|
|
|
// sanity check
|
|
ensure(torrent.info, 'info')
|
|
ensure(torrent.info['name.utf-8'] || torrent.info.name, 'info.name')
|
|
ensure(torrent.info['piece length'], 'info[\'piece length\']')
|
|
ensure(torrent.info.pieces, 'info.pieces')
|
|
|
|
if (torrent.info.files) {
|
|
torrent.info.files.forEach(file => {
|
|
ensure(typeof file.length === 'number', 'info.files[0].length')
|
|
ensure(file['path.utf-8'] || file.path, 'info.files[0].path')
|
|
})
|
|
} else {
|
|
ensure(typeof torrent.info.length === 'number', 'info.length')
|
|
}
|
|
|
|
const result = {
|
|
info: torrent.info,
|
|
infoBuffer: bencode.encode(torrent.info),
|
|
name: (torrent.info['name.utf-8'] || torrent.info.name).toString(),
|
|
announce: []
|
|
}
|
|
|
|
result.infoHash = crypto.createHash('sha1')
|
|
.update(result.infoBuffer)
|
|
.digest('hex')
|
|
result.infoHashBuffer = Buffer.from(result.infoHash, 'hex')
|
|
|
|
if (torrent.info.private !== undefined) result.private = !!torrent.info.private
|
|
|
|
if (torrent['creation date']) result.created = new Date(torrent['creation date'] * 1000)
|
|
if (torrent['created by']) result.createdBy = torrent['created by'].toString()
|
|
|
|
if (Buffer.isBuffer(torrent.comment)) result.comment = torrent.comment.toString()
|
|
|
|
// announce and announce-list will be missing if metadata fetched via ut_metadata
|
|
if (Array.isArray(torrent['announce-list']) && torrent['announce-list'].length > 0) {
|
|
torrent['announce-list'].forEach(urls => {
|
|
urls.forEach(url => {
|
|
result.announce.push(url.toString())
|
|
})
|
|
})
|
|
} else if (torrent.announce) {
|
|
result.announce.push(torrent.announce.toString())
|
|
}
|
|
|
|
// handle url-list (BEP19 / web seeding)
|
|
if (Buffer.isBuffer(torrent['url-list'])) {
|
|
// some clients set url-list to empty string
|
|
torrent['url-list'] = torrent['url-list'].length > 0
|
|
? [torrent['url-list']]
|
|
: []
|
|
}
|
|
result.urlList = (torrent['url-list'] || []).map(url => url.toString())
|
|
|
|
// remove duplicates by converting to Set and back
|
|
result.announce = Array.from(new Set(result.announce))
|
|
result.urlList = Array.from(new Set(result.urlList))
|
|
|
|
const files = torrent.info.files || [torrent.info]
|
|
result.files = files.map((file, i) => {
|
|
const parts = [].concat(result.name, file['path.utf-8'] || file.path || []).map(p => p.toString())
|
|
return {
|
|
path: path.join.apply(null, [path.sep].concat(parts)).slice(1),
|
|
name: parts[parts.length - 1],
|
|
length: file.length,
|
|
offset: files.slice(0, i).reduce(sumLength, 0)
|
|
}
|
|
})
|
|
|
|
result.length = files.reduce(sumLength, 0)
|
|
|
|
const lastFile = result.files[result.files.length - 1]
|
|
|
|
result.pieceLength = torrent.info['piece length']
|
|
result.lastPieceLength = ((lastFile.offset + lastFile.length) % result.pieceLength) || result.pieceLength
|
|
result.pieces = splitPieces(torrent.info.pieces)
|
|
|
|
return result
|
|
}
function splitPieces (buf) {
|
|
const pieces = []
|
|
for (let i = 0; i < buf.length; i += 20) {
|
|
pieces.push(buf.slice(i, i + 20).toString('hex'))
|
|
}
|
|
return pieces
|
|
}
function ensure (bool, fieldName) {
|
|
if (!bool) throw new Error(`Torrent is missing required field: ${fieldName}`)
|
|
}
function sumLength (sum, file) {
|
|
return sum + file.length
|
|
}