# Content DB
# Copyright (C) 2018 rubenwardy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import flask, json, os, git, tempfile, shutil
from git import GitCommandError
from flask_sqlalchemy import SQLAlchemy
from urllib.error import HTTPError
import urllib.request
from urllib.parse import urlparse, quote_plus
from app import app
from app.models import *
from app.tasks import celery, TaskError
from app.utils import randomString


class GithubURLMaker:
	def __init__(self, url):
		self.baseUrl = None
		self.user = None
		self.repo = None

		# Rewrite path
		import re
		m = re.search(r"^\/([^\/]+)\/([^\/]+)\/?$", url.path)
		if m is None:
			return

		user = m.group(1)
		repo = m.group(2).replace(".git", "")
		self.baseUrl = "https://raw.githubusercontent.com/{}/{}/master" \
				.format(user, repo)
		self.user = user
		self.repo = repo

	def isValid(self):
		return self.baseUrl is not None

	def getRepoURL(self):
		return "https://github.com/{}/{}".format(self.user, self.repo)

	def getScreenshotURL(self):
		return self.baseUrl + "/screenshot.png"

	def getCommitsURL(self, branch):
		return "https://api.github.com/repos/{}/{}/commits?sha={}" \
				.format(self.user, self.repo, quote_plus(branch))

	def getCommitDownload(self, commit):
		return "https://github.com/{}/{}/archive/{}.zip" \
				.format(self.user, self.repo, commit)
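
	# getDepends() below fetches mod.conf and depends.txt through the raw-file
	# URL. These two helpers are a minimal sketch inferred from those call
	# sites, following the same pattern as getScreenshotURL(); the method names
	# are assumptions rather than part of the original snippet.
	def getModConfURL(self):
		return self.baseUrl + "/mod.conf"

	def getDependsURL(self):
		return self.baseUrl + "/depends.txt"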


krock_list_cache = None
krock_list_cache_by_name = None

def getKrockList():
	global krock_list_cache
	global krock_list_cache_by_name

	if krock_list_cache is None:
		contents = urllib.request.urlopen("http://krock-works.16mb.com/MTstuff/modList.php").read().decode("utf-8")
		list = json.loads(contents)

		def h(x):
			if not ("title" in x and "author" in x and
					"topicId" in x and "link" in x and x["link"] != ""):
				return False

			import re
			m = re.search(r"\[([A-Za-z0-9_]+)\]", x["title"])
			if m is None:
				return False

			x["name"] = m.group(1)
			return True

		def g(x):
			return {
				"title": x["title"],
				"author": x["author"],
				"name": x["name"],
				"topicId": x["topicId"],
				"link": x["link"],
			}

		krock_list_cache = [g(x) for x in list if h(x)]
		krock_list_cache_by_name = {}
		for x in krock_list_cache:
			if not x["name"] in krock_list_cache_by_name:
				krock_list_cache_by_name[x["name"]] = []

			krock_list_cache_by_name[x["name"]].append(x)

	return krock_list_cache, krock_list_cache_by_name
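
# Each cached entry has the shape {"title", "author", "name", "topicId", "link"},
# where "name" is the technical mod name extracted from the forum topic title.
# getKrockList() returns (flat_list, dict_keyed_by_name) so that findModInfo()
# below can match by name and author first, then fall back to a link substring search.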


def findModInfo(author, name, link):
	list, lookup = getKrockList()

	if name is not None and name in lookup:
		if len(lookup[name]) == 1:
			return lookup[name][0]

		for x in lookup[name]:
			if x["author"] == author:
				return x

	if link is not None and len(link) > 15:
		for x in list:
			if link in x["link"]:
				return x

	return None


def parseConf(string):
	retval = {}
	for line in string.split("\n"):
		idx = line.find("=")
		if idx > 0:
			key = line[:idx].strip()
			value = line[idx+1:].strip()
			retval[key] = value

	return retval
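
# Illustrative example (not from the original source): parseConf() turns the
# text of a mod.conf-style file into a flat dict of raw string values, e.g.
#
#   parseConf("name = mymod\ndepends = default, stairs")
#   => {"name": "mymod", "depends": "default, stairs"}
#
# Splitting comma-separated dependency lists is left to the callers.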


class PackageTreeNode:
	def __init__(self, baseDir, author=None, repo=None, name=None):
		print("Scanning " + baseDir)
		self.baseDir = baseDir
		self.author = author
		self.name = name
		self.repo = repo
		self.meta = None
		self.children = []

		# Detect type
		type = None
		is_modpack = False
		if os.path.isfile(baseDir + "/game.conf"):
			type = PackageType.GAME
		elif os.path.isfile(baseDir + "/init.lua"):
			type = PackageType.MOD
		elif os.path.isfile(baseDir + "/modpack.txt"):
			type = PackageType.MOD
			is_modpack = True
		elif os.path.isdir(baseDir + "/mods"):
			type = PackageType.GAME
		elif os.listdir(baseDir) == []:
			# probably a submodule
			return
		else:
			raise TaskError("Unable to detect package type!")

		self.type = type
		self.readMetaFiles()

		if self.type == PackageType.GAME:
			self.addChildrenFromModDir(baseDir + "/mods")
		elif is_modpack:
			self.addChildrenFromModDir(baseDir)

	def readMetaFiles(self):
		result = {}

		# mod.conf
		try:
			with open(self.baseDir + "/mod.conf", "r") as myfile:
				conf = parseConf(myfile.read())
				for key in ["name", "description", "title", "depends", "optional_depends"]:
					try:
						result[key] = conf[key]
					except KeyError:
						pass
		except IOError:
			print("mod.conf does not exist!")

		# description.txt
		if not "description" in result:
			try:
				with open(self.baseDir + "/description.txt", "r") as myfile:
					result["description"] = myfile.read()
			except IOError:
				print("description.txt does not exist!")

		# depends.txt
		import re
		pattern = re.compile(r"^([a-z0-9_]+)\??$")
		if not "depends" in result and not "optional_depends" in result:
			try:
				with open(self.baseDir + "/depends.txt", "r") as myfile:
					contents = myfile.read()
					soft = []
					hard = []
					for line in contents.split("\n"):
						line = line.strip()
						if pattern.match(line):
							if line[len(line) - 1] == "?":
								soft.append(line[:-1])
							else:
								hard.append(line)

					result["depends"] = hard
					result["optional_depends"] = soft

			except IOError:
				print("depends.txt does not exist!")

		else:
			if "depends" in result:
				result["depends"] = [x.strip() for x in result["depends"].split(",")]
			if "optional_depends" in result:
				result["optional_depends"] = [x.strip() for x in result["optional_depends"].split(",")]

		# Calculate title
		if "name" in result and not "title" in result:
			result["title"] = result["name"].replace("_", " ").title()

		# Calculate short description
		if "description" in result:
			desc = result["description"]
			idx = desc.find(".") + 1
			cutIdx = min(len(desc), 200 if idx < 5 else idx)
			result["short_description"] = desc[:cutIdx]

		# Get forum ID
		info = findModInfo(self.author, result.get("name"), self.repo)
		if info is not None:
			result["forumId"] = info.get("topicId")

		if "name" in result:
			self.name = result["name"]
			del result["name"]

		self.meta = result

	def addChildrenFromModDir(self, dir):
		for entry in next(os.walk(dir))[1]:
			path = dir + "/" + entry
			if not entry.startswith('.') and os.path.isdir(path):
				self.children.append(PackageTreeNode(path, name=entry))

	def fold(self, attr, key=None, acc=None):
		if acc is None:
			acc = set()

		if self.meta is None:
			return acc

		at = getattr(self, attr)
		value = at if key is None else at.get(key)

		if isinstance(value, list):
			acc |= set(value)
		elif value is not None:
			acc.add(value)

		for child in self.children:
			child.fold(attr, key, acc)

		return acc

	def get(self, key):
		return self.meta.get(key)


def cloneRepo(urlstr):
	gitDir = tempfile.gettempdir() + "/" + randomString(10)

	err = None
	try:
		git.Repo.clone_from(urlstr, gitDir, progress=None, env=None, depth=1)
	except GitCommandError as e:
		# This is needed to stop the backtrace being weird
		err = e.stderr

	if err is not None:
		raise TaskError(err.replace("stderr: ", "") \
				.replace("Cloning into '" + gitDir + "'...", "") \
				.strip())

	return gitDir
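
# Note: cloneRepo() hands ownership of the temporary checkout to the caller,
# which is expected to remove it when done (getMeta() and importRepoScreenshot()
# clean up with shutil.rmtree).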


@celery.task()
def getMeta(urlstr, author):
	gitDir = cloneRepo(urlstr)
	tree = PackageTreeNode(gitDir, author=author, repo=urlstr)
	shutil.rmtree(gitDir)

	result = {}
	result["name"] = tree.name
	result["provides"] = tree.fold("name")
	result["type"] = tree.type.name

	for key in ["depends", "optional_depends"]:
		result[key] = tree.fold("meta", key)

	for key in ["title", "repo", "issueTracker", "forumId", "description", "short_description"]:
		result[key] = tree.get(key)

	for mod in result["provides"]:
		result["depends"].discard(mod)
		result["optional_depends"].discard(mod)

	for key, value in result.items():
		if isinstance(value, set):
			result[key] = list(value)

	return result
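
# getMeta() above returns a plain dict; an illustrative result for a small mod
# repository (values hypothetical) looks like:
#   {"name": "mymod", "provides": ["mymod"], "type": "MOD",
#    "depends": ["default"], "optional_depends": [],
#    "title": "Mymod", "forumId": 12345, "description": "...",
#    "short_description": "...", "repo": None, "issueTracker": None}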


@celery.task()
def makeVCSRelease(id, branch):
	release = PackageRelease.query.get(id)

	if release is None:
		raise TaskError("No such release!")

	if release.package is None:
		raise TaskError("No package attached to release")

	url = urlparse(release.package.repo)

	urlmaker = None
	if url.netloc == "github.com":
		urlmaker = GithubURLMaker(url)
	else:
		raise TaskError("Unsupported repo")

	if not urlmaker.isValid():
		raise TaskError("Invalid github repo URL")

	commitsURL = urlmaker.getCommitsURL(branch)
	contents = urllib.request.urlopen(commitsURL).read().decode("utf-8")
	commits = json.loads(contents)

	if len(commits) == 0 or not "sha" in commits[0]:
		raise TaskError("No commits found")

	release.url = urlmaker.getCommitDownload(commits[0]["sha"])
	print(release.url)
	release.task_id = None
	db.session.commit()

	return release.url


@celery.task()
def importRepoScreenshot(id):
	package = Package.query.get(id)
	if package is None or package.soft_deleted:
		raise Exception("Unexpected missing or deleted package")

	# Clone the repo
	gitDir = cloneRepo(package.repo)

	# Find and import screenshot
	try:
		for ext in ["png", "jpg", "jpeg"]:
			sourcePath = gitDir + "/screenshot." + ext
			if os.path.isfile(sourcePath):
				filename = randomString(10) + "." + ext
				destPath = os.path.join("app/public/uploads", filename)
				shutil.copyfile(sourcePath, destPath)

				ss = PackageScreenshot()
				ss.approved = True
				ss.package = package
				ss.title = "screenshot." + ext
				ss.url = "/uploads/" + filename
				db.session.add(ss)
				db.session.commit()

				return "/uploads/" + filename
	finally:
		shutil.rmtree(gitDir)

	print("no screenshot found in repo")
	return None


def getDepends(package):
	url = urlparse(package.repo)
	urlmaker = None
	if url.netloc == "github.com":
		urlmaker = GithubURLMaker(url)
	else:
		return {}

	result = {}
	if not urlmaker.isValid():
		return {}

	#
	# Try getting dependencies from mod.conf
	#
	try:
		contents = urllib.request.urlopen(urlmaker.getModConfURL()).read().decode("utf-8")
		conf = parseConf(contents)
		for key in ["depends", "optional_depends"]:
			try:
				result[key] = conf[key]
			except KeyError:
				pass

	except HTTPError:
		print("mod.conf does not exist")

	if "depends" in result or "optional_depends" in result:
		return result

	#
	# Try depends.txt
	#
	import re
	pattern = re.compile(r"^([a-z0-9_]+)\??$")
	try:
		contents = urllib.request.urlopen(urlmaker.getDependsURL()).read().decode("utf-8")
		soft = []
		hard = []
		for line in contents.split("\n"):
			line = line.strip()
			if pattern.match(line):
				if line[len(line) - 1] == "?":
					soft.append(line[:-1])
				else:
					hard.append(line)

		result["depends"] = ",".join(hard)
		result["optional_depends"] = ",".join(soft)
	except HTTPError:
		print("depends.txt does not exist")

	return result


def importDependencies(package, mpackage_cache):
	if Dependency.query.filter_by(depender=package).count() != 0:
		return

	result = getDepends(package)

	if "depends" in result:
		deps = Dependency.SpecToList(package, result["depends"], mpackage_cache)
		print("{} hard: {}".format(len(deps), result["depends"]))
		for dep in deps:
			dep.optional = False
			db.session.add(dep)

	if "optional_depends" in result:
		deps = Dependency.SpecToList(package, result["optional_depends"], mpackage_cache)
		print("{} soft: {}".format(len(deps), result["optional_depends"]))
		for dep in deps:
			dep.optional = True
			db.session.add(dep)


@celery.task()
def importAllDependencies():
	Dependency.query.delete()
	mpackage_cache = {}
	packages = Package.query.filter_by(type=PackageType.MOD).all()
	for i, p in enumerate(packages):
		print("============= {} ({}/{}) =============".format(p.name, i, len(packages)))
		importDependencies(p, mpackage_cache)

	db.session.commit()