#!/usr/bin/env python3
# CurseForge modpack downloader
# This program is an alternative to the Twitch client, written for Linux users,
# so that they can install Minecraft modpacks from CurseForge.
# This tool requires that the user download the pack zip from CurseForge. It
# will then generate a complete modpack directory that can be imported into
# a launcher of the user's choice.

# Please see the included README file for more info.
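#
# Example invocation (the zip file name below is a placeholder; --outdir is
# optional and defaults to packs/<packname>):
#   python3 jfdicmpdl.py MyModpack-1.0.zip --outdir ~/minecraft/my-modpack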

import os
import sys
import requests
import json
import asyncio
import subprocess
import time
import random
import shutil
import argparse
import tempfile
import traceback
import hashlib
from concurrent.futures import ThreadPoolExecutor
from zipfile import ZipFile

API_URL = 'https://api.modpacks.ch/public'
WORKERS = 6
REQUESTS_PER_SEC = 4
SLEEP_SECONDS = WORKERS / REQUESTS_PER_SEC
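# Throttling note: each call to fetch_mod sleeps a total of SLEEP_SECONDS
# around its download, so WORKERS concurrent threads average out to roughly
# REQUESTS_PER_SEC requests per second against the API.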


def main(zipfile, *, packdata_dir, mc_dir=None):
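    """Assemble a modpack from a CurseForge pack zip.

    Extracts the zip into packdata_dir, downloads every mod listed in its
    manifest.json into a local .modcache directory, copies the pack's
    overrides into the output directory, and writes a
    MANUAL-DOWNLOAD-README.txt for any mods that could not be fetched
    automatically. Output goes to mc_dir, or packs/<packname> if no
    directory is given.
    """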
    # Extract pack
    packname = os.path.splitext(zipfile)[0]
    packname = os.path.basename(packname)

    if not mc_dir:
        if not os.path.isdir('packs/'):
            os.mkdir('packs/')
        mc_dir = 'packs/' + packname
    # Generate minecraft environment
    print("Output directory is '%s'" % mc_dir)
    if os.path.isdir(mc_dir):
        if os.listdir(mc_dir):
            print("Error: Output directory already exists and is not empty")
            return
        else:
            print("Output directory exists (and is empty)")
    else:
        print("Creating output directory")
        os.mkdir(mc_dir)

    print("Extracting %s" % zipfile)
    with ZipFile(zipfile, 'r') as zip:
        zip.extractall(packdata_dir)

    try:
        with open(packdata_dir + '/manifest.json', 'r') as mf:
            manifest = json.load(mf)
    except (json.JSONDecodeError, OSError) as e:
        print("Manifest file not found or was corrupted.")
        print(e)
        return
    ml_message = 'You then need to install: '
    for modloader in manifest['minecraft']['modLoaders']:
        ml_message = ml_message + modloader['id'] + " "

    # Download mods
    print("Downloading mods")
    if not os.path.isdir('.modcache'):
        os.mkdir('.modcache')

    # if not os.path.isdir('node_modules'):
    #     print("Installing NodeJS dependencies")
    #     subprocess.run(['npm', 'install'])
    # subprocess.run(['node', 'mod_download.js', packdata_dir + '/manifest.json', '.modcache', packdata_dir + '/mods.json'])

    mods, manual_downloads = download_all_mods(packdata_dir + '/manifest.json', '.modcache')

    print("Copying mods")
    os.mkdir(mc_dir + '/mods')
    os.mkdir(mc_dir + '/resources')

    # TODO detect texture packs
    # for mod in mods:
    #     jar = mod[0]
    #     type = mod[1]
    #     if type == 'mc-mods':
    #         modfile = mc_dir + '/mods/' + os.path.basename(jar)
    #         if not os.path.exists(modfile):
    #             cp_safe(os.path.abspath(jar), modfile)
    #     elif type == 'texture-packs':
    #         print("Extracting texture pack %s" % jar)
    #         with tempfile.TemporaryDirectory() as texpack_dir:
    #             with ZipFile(jar, 'r') as zip:
    #                 zip.extractall(texpack_dir)
    #             for dir in os.listdir(texpack_dir + '/assets'):
    #                 f = texpack_dir + '/assets/' + dir
    #                 cp_safe(f, mc_dir + '/resources/' + dir)
    #     else:
    #         print("Unknown file type %s" % type)
    #         sys.exit(1)

    # Copy overrides
    override_dir = packdata_dir + '/overrides/'
    if os.path.exists(override_dir):
        print("Copying overrides")
        for dir in os.listdir(override_dir):
            print(dir + "...")
            cp_safe(override_dir + dir, mc_dir + '/' + dir)
    else:
        print("Copying overrides [nothing to do]")

    print("Done!\n\n\n\nThe modpack has been downloaded to: " + mc_dir)
    print(ml_message)
    if len(manual_downloads) > 0:
        msg = ""
        msg += "====MANUAL DOWNLOAD REQUIRED====\n"
        msg += "The following mods failed to download\n"
        msg += "Please download them manually and place them in " + mc_dir + "/mods\n"
        for url, resp in manual_downloads:
            msg += "* %s\n" % url
        print(msg[:-1])
        with open(mc_dir + '/MANUAL-DOWNLOAD-README.txt', 'w') as f:
            f.write(msg)


# MOD DOWNLOADING

def get_json(session, url, logtag):
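    """Fetch a URL and parse the response as JSON.

    Retries with increasing timeouts (4 to 30 seconds), then falls back to a
    single 120-second attempt. Returns the decoded JSON object, or None if
    every attempt timed out.
    """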
    gotit = False
    print(logtag + "GET (json) " + url)
    for tout in [4, 5, 10, 20, 30]:
        try:
            r = session.get(url, timeout=tout)
            gotit = True
            break
        except requests.Timeout:
            print(logtag + "timeout %02d %s" % (tout, url))
    if not gotit:
        try:
            print(logtag + "GET (json, long timeout) " + url)
            r = session.get(url, timeout=120)
            gotit = True
        except requests.Timeout:
            print(logtag + "timeout")
            traceback.print_exc()
            print(logtag + "Error timeout trying to access %s" % url)
            return None

    return json.loads(r.text)


def fetch_mod(session, f, out_dir, logtag, attempt):
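    """Download a single mod described by a manifest entry.

    Looks up the projectID/fileID pair through the modpacks.ch API, then
    downloads the jar into out_dir, skipping the download when a cached copy
    already matches the expected size and SHA-1. Returns (path, file_type)
    on success; on failure returns a tuple starting with the original
    manifest entry and 'error' (retry later) or 'dist-error' (manual
    download required).
    """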
    rnd = random.random() * SLEEP_SECONDS
    time.sleep(rnd)
    project_info = None  # so the except handler below never hits an unbound name
    try:
        pid = f['projectID']
        fid = f['fileID']
        project_info = get_json(session, API_URL + ('/mod/%d' % pid), logtag)
        if project_info is None:
            print(logtag + "fetch failed")
            return (f, 'error')

        file_type = "mc-mods"
        info = [x for x in project_info["versions"] if x["id"] == fid]

        if len(info) != 1:
            print(logtag + "Could not find mod jar for pid:%s fid:%s, got %s results" % (pid, fid, len(info)))
            return (f, 'dist-error' if attempt == "retry" else 'error', project_info)
        info = info[0]

        fn = info['name']
        dl = info['url']
        sha1_expected = info['sha1'].lower()
        out_file = out_dir + '/' + fn

        if os.path.exists(out_file):
            if os.path.getsize(out_file) == info['size'] and sha1_expected == sha1(out_file):
                print(logtag + "%s OK cached" % fn)
                return (out_file, file_type)

        status = download(dl, out_file, session=session, progress=False)
        time.sleep(SLEEP_SECONDS - rnd)
        if status != 200:
            print(logtag + "download failed (error %d)" % status)
            return (f, 'error')
        if sha1_expected != sha1(out_file):
            print(logtag + "download failed (SHA1 mismatch!)")
            return (f, 'error')
        print(logtag + "%s OK downloaded" % fn)
        return (out_file, file_type)
    except Exception:
        print(logtag + "download failed (exception)")
        traceback.print_exc()
        return (f, 'dist-error' if attempt == "retry" else 'error', project_info)


async def download_mods_async(manifest, out_dir):
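    """Download every mod in the manifest concurrently.

    Spreads fetch_mod calls across a thread pool and retries failures;
    anything that still cannot be downloaded automatically is collected for
    manual download. Returns (jars, manual_downloads), where manual_downloads
    is a list of (url, response) pairs.
    """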
    with ThreadPoolExecutor(max_workers=WORKERS) as executor, \
            requests.Session() as session:
        loop = asyncio.get_event_loop()
        tasks = []
        maxn = len(manifest['files'])

        print("Downloading %s mods" % maxn)
        for n, f in enumerate(manifest['files']):
            logtag = "[" + str(n+1) + "/" + str(maxn) + "] "
            task = loop.run_in_executor(executor, fetch_mod, *(session, f, out_dir, logtag, "first attempt"))
            tasks.append(task)

        jars = []
        manual_downloads = []
        while len(tasks) > 0:
            retry_tasks = []

            for resp in await asyncio.gather(*tasks):
                if resp[1] == 'error':
                    print("failed to fetch %s, retrying later" % resp[0])
                    retry_tasks.append(resp[0])
                elif resp[1] == 'dist-error':
                    print(resp[2])
                    manual_dl_url = resp[2]['links'][0]['link'] + '/download/' + str(resp[0]['fileID'])
                    manual_downloads.append((manual_dl_url, resp))
                    # add to jars list so that the file gets linked
                    jars.append(resp[3:])
                else:
                    jars.append(resp)

            tasks = []
            if len(retry_tasks) > 0:
                print("retrying...")
                time.sleep(2)
                for f in retry_tasks:
                    tasks.append(loop.run_in_executor(executor, fetch_mod, *(session, f, out_dir, "[retry] ", "retry")))
    return jars, manual_downloads


def download_all_mods(manifest_json, mods_dir):
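    """Read the pack manifest and download all of its mods into mods_dir."""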
    with open(manifest_json, 'r') as f:
        manifest = json.load(f)

    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(download_mods_async(manifest, mods_dir))
    loop.run_until_complete(future)
    return future.result()


def status_bar(text, progress, bar_width=0.5, show_percent=True, borders='[]', progress_ch='#', space_ch=' '):
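    """Render a single-line progress bar that overwrites itself in place.

    progress is a float in [0, 1]; the bar takes up bar_width of the terminal
    width, with the remaining space used for the (truncated) text and the
    percentage.
    """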
    ansi_el = '\x1b[K\r'  # escape code to clear the rest of the line plus carriage return
    term_width = shutil.get_terminal_size().columns
    if term_width < 10:
        print(end=ansi_el)
        return
    bar_width_c = max(int(term_width * bar_width), 4)
    text_width = min(term_width - bar_width_c - 6, len(text))  # subtract 4 characters for the percentage and 2 spaces
    text_part = '' if (text_width == 0) else text[-text_width:]

    progress_c = int(progress * (bar_width_c - 2))
    remaining_c = bar_width_c - 2 - progress_c
    padding_c = max(0, term_width - bar_width_c - text_width - 6)

    bar = borders[0] + progress_ch * progress_c + space_ch * remaining_c + borders[1]
    pad = ' ' * padding_c
    print("%s %s%3.0f%% %s" % (text_part, pad, (progress * 100), bar), end=ansi_el)


def download(url, dest, progress=False, session=None):
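    """Stream url to the file dest, optionally drawing a progress bar.

    Returns the HTTP status code, or -1 on a network error and -2 on a
    filesystem error.
    """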
    try:
        if session is not None:
            r = session.get(url, stream=True)
        else:
            r = requests.get(url, stream=True)
        # Content-Length may be missing; fall back to 0 so the progress bar is skipped
        size = int(r.headers.get('Content-Length', 0))

        if r.status_code != 200:
            return r.status_code

        with open(dest, 'wb') as f:
            if progress:
                n = 0
                for chunk in r.iter_content(1048576):
                    f.write(chunk)
                    n += len(chunk)
                    if size:
                        status_bar(url, n / size)
            else:
                f.write(r.content)
    except requests.RequestException:
        return -1
    except OSError:
        return -2

    if progress:
        print()

    return r.status_code


def cp_safe(src, dst):
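    """Copy a file or directory tree, refusing to overwrite an existing destination."""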
    if os.path.exists(dst):
        raise FileExistsError("Cannot copy '%s' -> '%s' because the destination already exists" % (src, dst))
    if os.path.isdir(src):
        shutil.copytree(src, dst)
    else:
        shutil.copyfile(src, dst)


def sha1(src):
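    """Return the hex SHA-1 digest of the file at src."""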
    h = hashlib.sha1()
    with open(src, 'rb') as f:
        while True:
            data = f.read(4096)
            if not data:
                break
            h.update(data)
    return h.hexdigest()


# And, of course, the main:

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('zipfile')
    parser.add_argument('--outdir', dest='outdir')
    args = parser.parse_args(sys.argv[1:])
    with tempfile.TemporaryDirectory() as packdata_dir:
        main(args.zipfile, packdata_dir=packdata_dir, mc_dir=args.outdir)