# Content DB
# Copyright (C) 2018 rubenwardy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import flask, json, os, re, git, tempfile, shutil, gitdb
from git import GitCommandError
from git_archive_all import GitArchiver
from flask_sqlalchemy import SQLAlchemy
from urllib.error import HTTPError
import urllib.request
from urllib.parse import urlparse, quote_plus, urlsplit
from zipfile import ZipFile

from app import app
from app.models import *
from app.tasks import celery, TaskError
from app.utils import randomString
from .minetestcheck import build_tree, MinetestCheckError, ContentType
from .minetestcheck.config import parse_conf


class GithubURLMaker:
	# Builds raw file and API URLs for a parsed GitHub repository URL.
	def __init__(self, url):
		self.baseUrl = None
		self.user = None
		self.repo = None

		# Extract the username and repository name from the URL path
		m = re.search(r"^\/([^\/]+)\/([^\/]+)\/?$", url.path)
		if m is None:
			return

		user = m.group(1)
		repo = m.group(2).replace(".git", "")
		self.baseUrl = "https://raw.githubusercontent.com/{}/{}/master" \
				.format(user, repo)
		self.user = user
		self.repo = repo

	def isValid(self):
		return self.baseUrl is not None

	def getRepoURL(self):
		return "https://github.com/{}/{}".format(self.user, self.repo)

	def getScreenshotURL(self):
		return self.baseUrl + "/screenshot.png"

	def getModConfURL(self):
		return self.baseUrl + "/mod.conf"
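
	# getDepends() below calls urlmaker.getDependsURL(), but no such method
	# appears in this file. A minimal sketch, assuming depends.txt lives at
	# the repository root alongside screenshot.png and mod.conf:
	def getDependsURL(self):
		return self.baseUrl + "/depends.txt"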

	def getCommitsURL(self, branch):
		return "https://api.github.com/repos/{}/{}/commits?sha={}" \
				.format(self.user, self.repo, quote_plus(branch))

	def getCommitDownload(self, commit):
		return "https://github.com/{}/{}/archive/{}.zip" \
				.format(self.user, self.repo, commit)


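# Cache of Krock's community mod list, fetched lazily on first use.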
krock_list_cache = None
krock_list_cache_by_name = None
def getKrockList():
	global krock_list_cache
	global krock_list_cache_by_name

	if krock_list_cache is None:
		contents = urllib.request.urlopen("https://krock-works.uk.to/minetest/modList.php").read().decode("utf-8")
		mods = json.loads(contents)

		# h() accepts only entries with all required fields, and pulls the
		# mod's technical name out of the "[name]" prefix in the title.
		def h(x):
			if not ("title" in x and "author" in x and \
					"topicId" in x and "link" in x and x["link"] != ""):
				return False

			m = re.search(r"\[([A-Za-z0-9_]+)\]", x["title"])
			if m is None:
				return False

			x["name"] = m.group(1)
			return True

		def g(x):
			return {
				"title": x["title"],
				"author": x["author"],
				"name": x["name"],
				"topicId": x["topicId"],
				"link": x["link"],
			}

		krock_list_cache = [g(x) for x in mods if h(x)]
		krock_list_cache_by_name = {}
		for x in krock_list_cache:
			if not x["name"] in krock_list_cache_by_name:
				krock_list_cache_by_name[x["name"]] = []

			krock_list_cache_by_name[x["name"]].append(x)

	return krock_list_cache, krock_list_cache_by_name


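# Finds a mod in Krock's list by name, disambiguating by author, or failing
# that by matching the repository link.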
def findModInfo(author, name, link):
	mods, lookup = getKrockList()

	if name is not None and name in lookup:
		if len(lookup[name]) == 1:
			return lookup[name][0]

		for x in lookup[name]:
			if x["author"] == author:
				return x

	if link is not None and len(link) > 15:
		for x in mods:
			if link in x["link"]:
				return x

	return None


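# Rebuilds a user-supplied URL with empty credentials so that git fails
# instead of prompting for a username and password. For example
# (hypothetical input):
#   generateGitURL("https://github.com/foo/bar.git")
#   -> "http://:@github.com/foo/bar.git"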
def generateGitURL(urlstr):
	scheme, netloc, path, query, frag = urlsplit(urlstr)

	# Re-add the "?" separator, which urlsplit strips from the query
	return "http://:@" + netloc + path + ("?" + query if query else "")


def getTempDir():
	return os.path.join(tempfile.gettempdir(), randomString(10))


# Clones a repo from an unvalidated URL.
# Returns a tuple of path and repo on success.
# Throws `TaskError` on failure.
# Caller is responsible for deleting the returned directory.
def cloneRepo(urlstr, ref=None, recursive=False):
	gitDir = getTempDir()

	err = None
	try:
		gitUrl = generateGitURL(urlstr)
		print("Cloning from " + gitUrl)

		if ref is None:
			repo = git.Repo.clone_from(gitUrl, gitDir, \
					progress=None, env=None, depth=1, recursive=recursive, kill_after_timeout=15)
		else:
			# A shallow clone can't target an arbitrary ref directly, so init
			# an empty repo and pull the ref from the remote instead
			repo = git.Repo.init(gitDir)
			origin = repo.create_remote("origin", url=gitUrl)
			assert origin.exists()
			origin.fetch()
			origin.pull(ref)

			for submodule in repo.submodules:
				submodule.update(init=True)

		return gitDir, repo

	except GitCommandError as e:
		# This is needed to stop the backtrace being weird
		err = e.stderr

	except gitdb.exc.BadName as e:
		# gitdb.exc.BadName has no stderr attribute, so use its message
		err = "Unable to find the reference " + (ref or "?") + "\n" + str(e)

	raise TaskError(err.replace("stderr: ", "") \
			.replace("Cloning into '" + gitDir + "'...", "") \
			.strip())


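# Celery task: clones a repository and extracts its package metadata (name,
# type, provides, dependencies, and common .conf fields).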
@celery.task()
def getMeta(urlstr, author):
	gitDir, _ = cloneRepo(urlstr, recursive=True)

	try:
		tree = build_tree(gitDir, author=author, repo=urlstr)
	except MinetestCheckError as err:
		raise TaskError(str(err))
	finally:
		# Remove the temporary clone whether or not validation succeeded
		shutil.rmtree(gitDir)

	result = {}
	result["name"] = tree.name
	result["provides"] = tree.fold("name")
	result["type"] = tree.type.name

	for key in ["depends", "optional_depends"]:
		result[key] = tree.fold("meta", key)

	for key in ["title", "repo", "issueTracker", "forumId", "description", "short_description"]:
		result[key] = tree.get(key)

	# A package should not depend on the mods it itself provides
	for mod in result["provides"]:
		result["depends"].discard(mod)
		result["optional_depends"].discard(mod)

	# Sets are not JSON serialisable, so convert them to lists
	for key, value in result.items():
		if isinstance(value, set):
			result[key] = list(value)

	return result


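# Creates a release that points at GitHub's zip archive for the latest commit
# on the given branch. Note: its only call site below is commented out in
# favour of cloning the repository directly.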
def makeVCSReleaseFromGithub(id, branch, release, url):
	urlmaker = GithubURLMaker(url)
	if not urlmaker.isValid():
		raise TaskError("Invalid GitHub repo URL")

	commitsURL = urlmaker.getCommitsURL(branch)
	try:
		contents = urllib.request.urlopen(commitsURL).read().decode("utf-8")
		commits = json.loads(contents)
	except HTTPError:
		raise TaskError("Unable to get commits for GitHub repository. Either the repository or reference doesn't exist.")

	if len(commits) == 0 or "sha" not in commits[0]:
		raise TaskError("No commits found")

	release.url = urlmaker.getCommitDownload(commits[0]["sha"])
	release.task_id = None
	release.commit_hash = commits[0]["sha"]
	release.approve(release.package.author)
	db.session.commit()

	return release.url


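# Celery task: extracts an uploaded zip release and validates its contents,
# approving the release on success and marking it as failing validation
# otherwise.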
@celery.task(bind=True)
def checkZipRelease(self, id, path):
	release = PackageRelease.query.get(id)
	if release is None:
		raise TaskError("No such release!")
	elif release.package is None:
		raise TaskError("No package attached to release")

	temp = getTempDir()
	try:
		with ZipFile(path, 'r') as zip_ref:
			zip_ref.extractall(temp)

		try:
			tree = build_tree(temp, expected_type=ContentType[release.package.type.name], \
					author=release.package.author.username, name=release.package.name)
		except MinetestCheckError as err:
			# Flag the release as failing validation and unapprove it
			if "Fails validation" not in release.title:
				release.title += " (Fails validation)"

			release.task_id = self.request.id
			release.approved = False
			db.session.commit()

			raise TaskError(str(err))

		release.task_id = None
		release.approve(release.package.author)
		db.session.commit()

	finally:
		shutil.rmtree(temp)


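# Celery task: creates a release by cloning the package's repository at the
# given branch or ref and archiving it, submodules included, into the
# uploads directory.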
@celery.task()
def makeVCSRelease(id, branch):
	release = PackageRelease.query.get(id)
	if release is None:
		raise TaskError("No such release!")
	elif release.package is None:
		raise TaskError("No package attached to release")

	# url = urlparse(release.package.repo)
	# if url.netloc == "github.com":
	# 	return makeVCSReleaseFromGithub(id, branch, release, url)

	gitDir, repo = cloneRepo(release.package.repo, ref=branch, recursive=True)

	try:
		tree = build_tree(gitDir, expected_type=ContentType[release.package.type.name], \
				author=release.package.author.username, name=release.package.name)

		filename = randomString(10) + ".zip"
		destPath = os.path.join(app.config["UPLOAD_DIR"], filename)

		assert(not os.path.isfile(destPath))
		archiver = GitArchiver(force_sub=True, main_repo_abspath=gitDir)
		archiver.create(destPath)
		assert(os.path.isfile(destPath))

		release.url = "/uploads/" + filename
		release.task_id = None
		release.commit_hash = repo.head.object.hexsha
		release.approve(release.package.author)
		print(release.url)
		db.session.commit()

		return release.url
	except MinetestCheckError as err:
		raise TaskError(str(err))
	finally:
		# Remove the clone even when validation or archiving fails
		shutil.rmtree(gitDir)


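# Celery task: clones the package's repository and imports a root-level
# screenshot.png/.jpg/.jpeg as the package's screenshot, if one exists.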
@celery.task()
def importRepoScreenshot(id):
	package = Package.query.get(id)
	if package is None or package.soft_deleted:
		raise Exception("Unexpected none package")

	# Clone the repository
	try:
		gitDir, _ = cloneRepo(package.repo)
	except TaskError as e:
		# ignore download errors
		print(e)
		return None

	# Find and import screenshot
	try:
		for ext in ["png", "jpg", "jpeg"]:
			sourcePath = gitDir + "/screenshot." + ext
			if os.path.isfile(sourcePath):
				filename = randomString(10) + "." + ext
				destPath = os.path.join(app.config["UPLOAD_DIR"], filename)
				shutil.copyfile(sourcePath, destPath)

				ss = PackageScreenshot()
				ss.approved = True
				ss.package = package
				ss.title = "screenshot." + ext
				ss.url = "/uploads/" + filename
				db.session.add(ss)
				db.session.commit()

				return "/uploads/" + filename
	finally:
		shutil.rmtree(gitDir)

	print("screenshot does not exist")
	return None


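# Fetches a package's dependencies from its GitHub repository, trying
# mod.conf first and falling back to depends.txt.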
def getDepends(package):
	url = urlparse(package.repo)
	urlmaker = None
	if url.netloc == "github.com":
		urlmaker = GithubURLMaker(url)
	else:
		return {}

	result = {}
	if not urlmaker.isValid():
		return {}

	#
	# Try getting depends from mod.conf
	#
	try:
		contents = urllib.request.urlopen(urlmaker.getModConfURL()).read().decode("utf-8")
		conf = parse_conf(contents)
		for key in ["depends", "optional_depends"]:
			try:
				result[key] = conf[key]
			except KeyError:
				pass

	except HTTPError:
		print("mod.conf does not exist")

	if "depends" in result or "optional_depends" in result:
		return result

	#
	# Try depends.txt
	#
	pattern = re.compile(r"^([a-z0-9_]+)\??$")
	try:
		contents = urllib.request.urlopen(urlmaker.getDependsURL()).read().decode("utf-8")
		soft = []
		hard = []
		for line in contents.split("\n"):
			line = line.strip()
			if pattern.match(line):
				# A trailing "?" marks an optional dependency
				if line[-1] == "?":
					soft.append(line[:-1])
				else:
					hard.append(line)

		result["depends"] = ",".join(hard)
		result["optional_depends"] = ",".join(soft)
	except HTTPError:
		print("depends.txt does not exist")

	return result


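# Imports dependencies for a single package, unless it already has some
# recorded.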
def importDependencies(package, mpackage_cache):
	if Dependency.query.filter_by(depender=package).count() != 0:
		return

	result = getDepends(package)

	if "depends" in result:
		deps = Dependency.SpecToList(package, result["depends"], mpackage_cache)
		print("{} hard: {}".format(len(deps), result["depends"]))
		for dep in deps:
			dep.optional = False
			db.session.add(dep)

	if "optional_depends" in result:
		deps = Dependency.SpecToList(package, result["optional_depends"], mpackage_cache)
		print("{} soft: {}".format(len(deps), result["optional_depends"]))
		for dep in deps:
			dep.optional = True
			db.session.add(dep)


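# Celery task: wipes and rebuilds the dependency table for all mod packages.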
@celery.task()
def importAllDependencies():
	Dependency.query.delete()
	mpackage_cache = {}
	packages = Package.query.filter_by(type=PackageType.MOD).all()
	for i, p in enumerate(packages):
		print("============= {} ({}/{}) =============".format(p.name, i, len(packages)))
		importDependencies(p, mpackage_cache)

	db.session.commit()