Convert added torrent data from uTorrent (resume.dat) to qBittorrent (*.fastresume) format
# encoding: utf-8
# USE AT YOUR OWN RISK
#
# Follow this guide to install Ruby on Windows (steps 1 and 2): https://forwardhq.com/support/installing-ruby-windows
# Install the required gem from an admin cmd prompt:
# https://rubygems.org/gems/bencode
#
# Run this file from a cmd prompt after installing Ruby, passing the path to your
# uTorrent resume.dat and the path to qBittorrent's BT_backup folder as arguments.
# (If you build a standalone exe with Ocra, edit datpath and outpath in the script instead.)
# Start qBittorrent and let all torrents be checked
# Enjoy!
#
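# A minimal example session (a sketch, assuming the script is saved as
# resumedat_2015.rb and that uTorrent/qBittorrent use their default profile
# locations; adjust both paths for your own system):
#
#   gem install bencode
#   ruby resumedat_2015.rb "%APPDATA%\uTorrent\resume.dat" "%LOCALAPPDATA%\qBittorrent\BT_backup"
#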
Encoding.default_internal = Encoding::UTF_8
Encoding.default_external = Encoding::UTF_8
require 'rubygems'
require 'bencode'
require 'digest'
require 'pp'
require 'win32ole'
require 'fileutils'
require 'json'
require 'pathname'
class Array
def rinclude? (reg)
return self.include?(reg) unless reg.is_a?(Regexp)
self.each do |i|
return true if i.is_a?(String) and i =~ reg
end
return false
end
end
def isUtRunning?
wmi = WIN32OLE.connect("winmgmts://")
procs = wmi.ExecQuery("SELECT * FROM win32_process WHERE Name = 'utorrent.exe'")
return true if procs.Count > 0
return false
end
def yayNay(userPrompt, defaultIsYes=true)
resp = ''
rv = defaultIsYes
begin
puts "\n"
print userPrompt
if defaultIsYes then
print ' [Y/n] '
else
print ' [y/N] '
end
resp = gets
end until resp =~ /^($|[yn]|yes|no)$/i
rv = true if resp =~ /^y/i
rv = false if resp =~ /^n/i
return rv
end
def prompt(userPrompt, default=nil)
resp = ''
rv = default
begin
puts "\n"
print userPrompt
unless default.nil? or default.empty? then
print " [#{default}] "
else
print ' '
end
resp = gets
break unless resp.chomp.empty?
break unless default.nil? or default.empty?
end while true
rv = resp unless resp.chomp.empty?
return rv.chomp
end
def backupResumeDat(datpath)
newname = ''
begin
newname = "#{datpath}.#{Time.now.strftime("%Y-%m-%d.%H-%M-%S.bak")}"
end while File.exist?(newname)
FileUtils.cp(datpath, newname)
end
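# tPunchup: return a copy of the decoded .torrent dict and, when requested, sync
# its tracker list and (for multi-file torrents) its display name from the
# corresponding resume.dat entry (res).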
def tPunchup(res, tor, dotracker = true, doname = true)
rv = tor.dup
if dotracker then
utrax = res['trackers']
# puts utrax
if utrax.size > 1 then
unless rv['announce-list'].nil?
unless rv['announce-list'].flatten.uniq.sort == utrax.uniq.sort then
rv['announce-list'] = utrax.uniq.map {|x| [x]}
end
else
unless rv['announce'] == utrax.first then
rv['announce'] = utrax.first
end
end
end
end
if doname then
path = Pathname.new(res['path'])
single = !(rv['info'].has_key?('files'))
unless single
tname = rv['info']['name']
rv['info']['name'] = path.basename.to_s
end
end
rv
end
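# mkfr: build a libtorrent-style fastresume dictionary for one torrent from its
# resume.dat entry (res) and its decoded .torrent dict (tor). The frestmpl
# template below lists the expected fields; most are overwritten from res/tor afterwards.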
def mkfr(res, tor)
frestmpl = {
'active_time' => 0 , # runtime
'added_time' => 0 , # added_on
'allocation' => 'full' ,
'announce_to_dht' => 1 ,
'announce_to_lsd' => 1 ,
'announce_to_trackers' => 1 ,
'auto_managed' => 1 ,
'banned_peers' => '' ,
'banned_peers6' => '' ,
'blocks per piece' => 0 , # ("piece length" from .torrent) / ("block size" resume.dat) - range [1,256]
'completed_time' => 0 , # completed_on
'download_rate_limit' => 0 ,
'file sizes' => [
[
0 , # File 1, size in bytes
0 # File 1, modified date (timestamp) or (modtimes array in resume)
] ,
[
0 , # File 2, size in bytes
0 # File 2, mtime (ts)
] ,
[
0 ,
0
]
] ,
'file-format' => 'libtorrent resume file' , # req
'file-version' => 1 , # req
'file_priority' => [ # prio bitfield
2 , # File 1, High
0 , # File 2, Do not download
1 # File 3, Normal
] ,
'finished_time' => 0 ,
'info-hash' => '', # Digest::SHA1.digest('infohashbinarydata') , # tdb['info'].bencode
'last_download' => 0 ,
'last_scrape' => 0 ,
'last_seen_complete' => 0 ,
'last_upload' => 0 ,
'libtorrent-version' => '1.1.11.0' ,
'mapped_files' => [
'relative\path\to\file1.ext' , # File 1
'r\p\t\file2.ext' , # File 2
'file3.ext' # File 3
] ,
'max_connections' => 100 ,
'max_uploads' => 16777215 ,
'num_downloaders' => 16777215 ,
'num_incomplete' => 0 ,
'num_seeds' => 0 ,
'paused' => 0, # 'started' - 0 = stopped, 1 = force , 2 = start
'peers' => '' ,
'peers6' => '' ,
'piece_priority' => '' , # "\x01"*1399 , # * num pieces?
'pieces' => '', #"\x01"*1399 , # * num pieces?
'seed_mode' => 0 ,
'seeding_time' => 0 ,
'sequential_download' => 0 ,
'super_seeding' => 0 ,
'total_downloaded' => 0 , # downloaded field
'total_uploaded' => 0 , # uploaded field
'upload_rate_limit' => 0 , #upspeed
'trackers' => [
[
'https://tracker' # direct match to trackers
]
]
}
fr = frestmpl.dup
npieces = tor['info']['pieces'].size / 20 # SHA1 hash is 20 bytes
fr['added_time'] = res['added_on'].to_i
fr['completed_time'] = res['completed_on'].to_i
fr['active_time'] = res['runtime'].to_i
fr['seeding_time'] = fr['active_time']
fr['blocks per piece'] = tor['info']['piece length'].to_i / res['blocksize'].to_i
fr['info-hash'] = Digest::SHA1.digest(tor['info'].bencode)
fr['paused'] = 1 # Always add torrents in paused state
# fr['paused'] = 1 if res['started'] == 0
# puts res['started']
# puts " paused : "+fr['paused'].to_s
fr['auto_managed'] = 0
fr['total_downloaded'] = res['downloaded'].to_i
fr['total_uploaded'] = res['uploaded'].to_i
fr['upload_rate_limit'] = res['upspeed'].to_i
fr['trackers'] = res['trackers'].map {|tracker| [tracker] }
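# Expand the 'have' bitfield from resume.dat (one bit per piece, LSB first) into
# one byte per piece (0 = missing, 1 = downloaded) for libtorrent's
# 'piece_priority' and 'pieces' fields.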
fr['piece_priority'] = res['have'].unpack('b*').first.scan(/./).map {|i| (i.to_i(2)*1) }.pack('C'*npieces) #= "\x01" * npieces
#~ fr['pieces'] = res['have'].unpack('b*').first.scan(/./).pack('b'*npieces)
fr['pieces'] = res['have'].unpack('b*').first.scan(/./).map {|i| (i.to_i(2)*1) }.pack('C'*npieces)
fr['finished_time'] = (Time.now - Time.at(fr['completed_time'])).to_i
unless fr['finished_time'].to_i == 0 then
fr['last_seen_complete'] = Time.now.to_i
end
#~ fr['finished_time'] = fr['completed_time']
fr['last_download'] = fr['finished_time']
fr['last_scrape'] = fr['finished_time']
fr['last_upload'] = fr['finished_time']
# Per file fields:
##########
# mapped_files
# file_priority
# file sizes
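# 'prio' in resume.dat holds one byte per file; map each byte to a libtorrent
# file priority: 1-8 become 1 (normal), 9-15 become 2 (high), and everything
# else becomes 0 (do not download).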
fr['file_priority'] = res['prio'].unpack('H*').first.scan(/../).map do |h|
next 1 if (1..8) === h.to_i(16)
next 2 if (9..15) === h.to_i(16)
next 0
end
fr['mapped_files'] = []
fr['file sizes'] = []
fmt = 0
unless tor['info']['files'].nil? then
tor['info']['files'].each_index do |findex|
tfile = tor['info']['files'][findex]
fr['mapped_files'] << Pathname.new('').join(*(tfile['path'])).to_s.force_encoding('UTF-8').gsub('/', '\\')
unless res['modtimes'].nil? then
fmt = res['modtimes'][findex].to_i # file modification time, used to avoid re-checking; not present in uTorrent 2.2
else
fmt = 0
end
thisFile = Pathname.new(res['path'].to_s.force_encoding('UTF-8').gsub('/', '\\')).join(fr['mapped_files'].last)
if thisFile.exist? then
fmt = thisFile.mtime.to_i unless fmt > 0
# puts " ------debug ---------"
# puts thisFile.to_s
# puts " File ok"
#~ puts thisFile.mtime.to_i
else # if a file is missing then set state to stopped
fr['paused'] = 1
# puts " ------debug ---------"
# puts tfile.to_s
# puts thisFile.to_s
# puts " File missing, paused"
end
# puts " - file: " + thisFile.to_s + fmt.to_s
if fr['file_priority'][findex] !=0 then
fr['file sizes'] << [ tfile['length'].to_i, fmt ]
else
# puts thisFile.to_s
fr['file sizes'] << [ 0, 0 ]
end
end
fr['mapped_files'] = []
savepath = Pathname.new(res['path']).dirname.to_s.force_encoding('UTF-8')
savepath << '\\' unless savepath.end_with?('\\')
fr['save_path'] = savepath.to_s.force_encoding('ASCII-8BIT') #res['path']
savepath2 = Pathname.new(res['path']).dirname.to_s.force_encoding('UTF-8').gsub('\\', '/')
savepath2 << '/' unless savepath2.end_with?('/')
fr['qBt-savePath'] = savepath2.to_s.force_encoding('ASCII-8BIT') #res['path']
# puts "---- debug start -----"
# puts " Multi file torrent"
# puts " qBt-savePath: " + savepath2.to_s
# puts " save_path: " + savepath.to_s
# puts "----- debug end ------"
else
savepath = Pathname.new(res['path']).dirname.to_s.force_encoding('UTF-8').gsub('\\', '/')
savepath << '/' unless savepath.end_with?('/')
fr['qBt-savePath'] = savepath.to_s.force_encoding('ASCII-8BIT')
# puts "---- debug start -----"
# puts " Single file torrent"
# puts " qBt-savePath: " + savepath.to_s
# puts " full path: " + res['path'].to_s
# puts " single path: " + Pathname.new(res['path']).dirname.to_s
# puts " file name: " + tor['info']['name'].to_s
# puts " size: " + tor['info']['length'].to_s
# puts "----- debug end ------"
# fr['mapped_files'] << tor['info']['name'].to_s.gsub('\\', '/')
unless res['modtimes'].nil? then
fmt = res['modtimes'][0].to_i # file modification time, used to avoid re-checking; not present in uTorrent 2.2
else
fmt = 0
end
thisFile = Pathname.new(res['path'])
if thisFile.exist? then
fmt = thisFile.mtime.to_i unless fmt > 0
# puts " filetime: " + thisFile.mtime.to_s
end
if fr['file_priority'][0] !=0 then
fr['file sizes'] << [ tor['info']['length'].to_i , fmt ]
else
# puts thisFile.to_s
fr['file sizes'] << [ 0, 0 ]
end
# puts tor['info']['name'].to_s
# puts tor['info'].to_s
# puts "---------"
end
##########
# qBittorrent 3.1+ Fields
##########
# fr['qBt-savePath'] = res['path'] # handled above
#~ fr['qBt-ratioLimit'] = '' # Blank because qBt actions on limit aren't as diverse
fr['qBt-label'] = res['label'].to_s.force_encoding('ASCII-8BIT') if res.has_key?('label')
fr['qBt-queuePosition'] = -1 # -1 for completed
fr['qBt-seedDate'] = fr['completed_time']
fr['qBt-ratioLimit'] = '-2' # -2 = Use Global, -1 = No limit, other number = actual ratio?
#~ fr['qBt-ratioLimit'] = (res['wanted_ratio'].to_f / 1000).to_s
# ^ uTorrent has this setting but the actions you can take are different,
# so you might not want to bring this over.
fr
end
#########################
#########################
if not defined?(Ocra)
# Check required conditions
if ARGV.count != 2
puts "\n"
puts "Usage ut2qt.exe \"C:\\Users\\Administrator\\AppData\\Roaming\\uTorrent\\resume.dat\" \"C:\\Users\\Administrator\\AppData\\Local\\qBittorrent\\BT_backup\\\""
puts "Don't forget to delete qBittorrent-resume.ini from "+ ENV['APPDATA']+"\\qBittorrent\ afterwards!"
puts "\n"
exit(-1)
end
end
if not defined?(Ocra) then
datpath=ARGV[0]
outpath=ARGV[1].chomp("\\").chomp("\"")
else
# datpath = "G:\\uTorrent\\resume.dat" # EDIT THIS!
# outpath = 'D:\Temp\BT_backup'
datpath = "C:\\Users\\Administrator\\AppData\\Roaming\\uTorrent\\resume.dat" # EDIT THIS!
outpath = 'C:\Users\ADMINI~1\AppData\Local\QBITTO~1\BT_BAC~1'
end
########################
########################
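# Main conversion flow: make sure uTorrent is not running, back up and parse
# resume.dat, sanity-check each entry, then write a <info-hash>.torrent /
# <info-hash>.fastresume pair into the BT_backup folder for every torrent.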
if isUtRunning? then
puts "uTorrent is still running! Close it and wait for the process to disappear before running this again."
exit -1
end
#datpath = "C:\\Users\\Administrator\\AppData\\Roaming\\uTorrent\\resume.dat" # EDIT THIS!
print "Backing up dat file..."
backupResumeDat(datpath)
puts "complete."
print "Parsing dat file..."
q = BEncode.load_file(datpath)
puts "complete."
puts "Consistency check..."
torrents = {}
q.each do |k,v|
#~ raise "Not a hash" unless v.is_a?(Hash)
next unless v.is_a?(Hash)
raise "No caption" unless v.has_key?('caption')
raise "#{v['caption']} :- No added on" unless v.has_key?('added_on')
raise "#{v['caption']} :- No completed on" unless v.has_key?('completed_on')
torrents[k] = v
end
puts "...Done\n"
#~ torrents.sort_by! {|v| v['completed_on'].to_i }
c = 0
d = 0
#outpath = 'C:\Temp\BT_backup'
raise "'#{outpath}' does not exist." unless File.exist?(outpath)
puts "Found this many torrents: "
puts torrents.length
total = torrents.length
puts "\nProcessing, please wait..."
torrents.each do |k, v|
d += 1
tf = Pathname.new(datpath).dirname + k
begin
q = BEncode.load_file(tf.to_s.force_encoding('UTF-8'))
rescue => error
puts "load_file error, skipping: "
puts k.to_s
# puts tf.to_s.encoding.to_s
puts "error: " + error.message
puts "backtrace: " + error.backtrace.to_s
next
end
puts "#{d}/#{total} #{k}"
outp = Pathname.new(outpath)
tfile = tPunchup(v, q, false, false)
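# qBittorrent's BT_backup keeps one <info-hash>.torrent and one
# <info-hash>.fastresume per torrent, so both output files are named after the
# info-hash of the (possibly patched) torrent.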
hash = Digest::SHA1.hexdigest(tfile['info'].bencode).downcase
if outp.join("#{hash}.torrent").exist? || outp.join("#{hash}.fastresume").exist?
#puts "Torrent processed already, skipping: " + k
next
end
# ^ Skip this if it has been done so that subsequent runs don't re-process
begin
frfile = mkfr(v, tfile)
rescue => error
puts "mkfr error, skipping: " + k
puts "error: " + error.message
puts "backtrace: " + error.backtrace.to_s
next
end
begin
f = File.new(outp.join("#{hash}.torrent").to_s.force_encoding('UTF-8'), 'wb')
f.write(tfile.bencode)
f.close
f = File.new(outp.join("#{hash}.fastresume").to_s.force_encoding('UTF-8'), 'wb')
f.write(frfile.bencode)
f.close
c += 1
rescue => error
puts "output error, skipping: " + k
puts "error: " + error.message
puts "backtrace: " + error.backtrace.to_s
puts frfile.to_s
next
end
end
puts "Done! Exported this many torrents: "
puts c