path: root/util/brew-convert.py
author     MCApollo <34170230+MCApollo@users.noreply.github.com>  2019-04-20 20:49:46 -0500
committer  MCApollo <34170230+MCApollo@users.noreply.github.com>  2019-04-23 20:18:47 -0500
commit     12335518ab39608d58370c85ff9f5384ad2aa5f7 (patch)
tree       352d81f2a2de3f1252af732080ec0fde38c13b4d /util/brew-convert.py
parent     a2b26ad12d4fa12f0273645caf4be6d0b8b71e7c (diff)
Port in homebrew-marauder for a hacky update/import system.
TODO: Maybe add a license and clean up the messy code.
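
Rough usage (the flags are the ones the script's argparse setup defines; the
tool writes into the current directory, so cd to the target tree first):

    brew-convert.py --all       # convert every Homebrew formula into `pwd`
    brew-convert.py --upgrade   # refresh the formulas already present in `pwd`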
Diffstat (limited to 'util/brew-convert.py')
-rwxr-xr-x  util/brew-convert.py  225
1 file changed, 225 insertions, 0 deletions
diff --git a/util/brew-convert.py b/util/brew-convert.py
new file mode 100755
index 000000000..f027ad54b
--- /dev/null
+++ b/util/brew-convert.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+import sys
+import os
+import json
+import glob
+import zipfile
+import tempfile
+import argparse
+from urllib.request import urlopen
+from io import BytesIO
+
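+# '.marauder' ships the ported homebrew-marauder helpers; this file uses m.parse(), m.version() and m.makesh().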
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '.marauder'))
+import marauder as m
+
+# Global variables:
+# Swapping for linux-brew might work.
+url = 'https://github.com/Homebrew/homebrew-core/archive/master.zip'
+# Use this string as the default version
+_version = "1.0-unknown"
+# Pick the hidden dot-file that holds json information.
+_dotfile = "beer"
+
+
+def download():
+ """ Downloads and extracts the url """
+ print(f"Downloading {url}", file=sys.stderr)
+
+ global extract
+ dl = urlopen(url)
+ z = zipfile.ZipFile(BytesIO(dl.read()))
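+    # commonprefix() of the zip entries is the archive's top-level folder (e.g. "homebrew-core-master/"), so only Formula/* gets extracted.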
+ extract = os.path.commonprefix(z.namelist()) + "Formula"
+
+ for file in z.namelist():
+ if file.startswith(extract):
+ z.extract(file)
+
+
+def convert(create=False):
+    """ Creates new formulas when create=True, otherwise updates existing ones """
+ global extract
+
+ def creator(data):
+        """ Creates/writes out the formula for the build system """
+ nonlocal file
+
+ def skip():
+ """ Returns true if formula has any unwanted dependencies """
+ nonlocal data
+ s = ['python@2',
+ 'python',
+ 'go',
+ 'mono',
+ 'ruby',
+ 'x11',
+ 'xcode',
+ 'java',
+ 'rust',
+ 'osxfuse',
+ 'gtk+3',
+ 'zsh',
+ 'perl',
+ 'node',
+ 'cabal-install',
+ 'ghc',
+ 'ant']
+
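+            # A dependency is unwanted if it equals, or is a substring of, one of the names above (e.g. 'gtk' would also match 'gtk+3').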
+ for d in data['depends']:
+ if any(d['depend'] in depend for depend in s):
+ return True
+ return False
+
+ if skip():
+ return None
+ elif data['url'].endswith('.git') or 'svn.' in os.path.dirname(data['url']):
+ return None # example.com/foo.git; svn.example.com/trunk
+
+        data['version'] = m.version(data['url']) or _version  # fall back to the default version
+        data['install'] = [line for line in data['install'] if line != '']  # drop empty lines
+ data['file'] = os.path.basename(file) # foobar.json | The original formula
+
+ # print(f"{data['name']}\t{data['version']}\t{data['url']}", file=sys.stderr)
+
+ # Create directories
+ n = data['name'].lower() # Name
+ try:
+ os.makedirs(f"{n}/_metadata")
+ except FileExistsError as e:
+ print(f"Directory {data['name']} already exists! Skipping... ({e}) {data['file']}", file=sys.stderr)
+ return -1
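+        # download.sh (and patches.sh below) go through os.open() so they are created executable (mode 0755, minus the umask).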
+ with open(f"{n}/_metadata/name", 'w') as f_name, \
+ open(f"{n}/_metadata/description", 'w') as f_description, \
+ open(f"{n}/_metadata/version", 'w') as f_version, \
+ open(f"{n}/_metadata/homepage", 'w') as f_homepage, \
+ open(f"{n}/.{_dotfile}", 'w') as f_dotfile, \
+ open(f"{n}/.make.sh-auto", 'w') as f_make, \
+ os.fdopen(os.open(f"{n}/download.sh", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o755), 'w') as f_download:
+ # _metadata
+ print(f"{data['name']}", file=f_name) # Use print because it appends a newline.
+ print(f"{data['description']}", file=f_description)
+ print(f"{data['version']}", file=f_version)
+ print(f"{data['homepage']}", file=f_homepage)
+ # JSON information file for whatever
+ json.dump(data, f_dotfile, indent=4)
+ # Create a script to download the file.
+ f_download.write(f"wget {data['url']}")
+ if data['mirror']:
+ f_download.write(f" || wget {data['mirror']}")
+ if data['patches']:
+ with os.fdopen(os.open(f"{n}/patches.sh", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o755), 'w') as f_patches:
+ for p in data['patches']:
+                        if not p['url']:  # no URL, so the patch body should be inline in p['data']
+ try:
+ assert p['data'] # But it might be a parsing error.
+
+ print("echo 'Creating brew-patch.diff'", file=f_patches)
+ print("cat << EOF >> brew-patch.diff", file=f_patches)
+ for l in p['data']:
+ f_patches.write(l)
+ print("EOF", file=f_patches)
+ except KeyError:
+ pass
+                        else:  # p['url'] is set; fetch the patch file
+ print(f"wget {p['url']}", file=f_patches)
+ # // if data['patches']
+ if data['install']:
+ try:
+ print("pkg:setup", file=f_make)
+ for l in m.makesh(data['install']):
+ print(f"{l}", file=f_make)
+                except TypeError:  # FIXME: m.makesh() could not translate this install block
+                    os.remove(f"{n}/.make.sh-auto")
+
+ # TODO:
+ # -> Parse `def install`
+
+ def updater(dotfile, data, file):
+ """ Updates a formula """
+        # Version comes from the url, so check the urls instead of the version strings.
+        if dotfile['url'] == data['url']:
+            # print(f"{dotfile['name']} is up to date.", file=sys.stderr)
+            return 0
+        else:
+            print(f"{dotfile['name']} has to be updated.", file=sys.stdout)
+
+ data['version'] = m.version(data['url']) or _version
+ data['install'] = [line for line in data['install'] if line != '']
+ data['file'] = dotfile['file']
+
+ n = os.path.dirname(file)
+ with open(f"{n}/_metadata/version", 'w') as f_version, \
+ open(f"{n}/.{_dotfile}", 'w') as f_dotfile, \
+ os.fdopen(os.open(f"{n}/download.sh", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o755), 'w') as f_download:
+
+ print(f"{data['version']}", file=f_version)
+ json.dump(data, f_dotfile, indent=4)
+ f_download.write(f"wget {data['url']}")
+ if data['mirror']:
+ f_download.write(f" || wget {data['mirror']}")
+ try: # Just try to remove the old tarball.
+ os.remove(f"{n}/" + os.path.basename(dotfile['url']))
+ except OSError:
+ pass
+ if data['patches']:
+ with os.fdopen(os.open(f"{n}/patches.sh", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o755), 'w') as f_patches:
+ for p in data['patches']:
+ if not p['url']:
+ try:
+ assert p['data']
+ print("echo 'Creating brew-patch.diff'", file=f_patches)
+ print("cat << EOF >> brew-patch.diff", file=f_patches)
+ for l in p['data']:
+ f_patches.write(l)
+ print("EOF", file=f_patches)
+ except KeyError:
+ pass
+ else:
+ print(f"wget {p['url']}", file=f_patches)
+
+
+ with tempfile.TemporaryDirectory() as t:
+ # `pushd` tmpdir
+ cwd = os.getcwd()
+ os.chdir(t)
+ # Grab master and extract
+ download()
+ assert extract
+ extract = os.path.realpath(extract)
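+        # realpath() pins the Formula directory to an absolute path so it remains usable after chdir'ing back.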
+ # `popd`
+ os.chdir(cwd)
+
+        # Keep processing inside the `with` block: the extracted Formula tree lives
+        # in the temporary directory and is deleted as soon as the block exits.
+        if create:
+            for file in sorted(glob.glob(extract + '/*')):
+                creator(m.parse(file))
+        else:
+            for file in sorted(glob.glob(f"*/.{_dotfile}")):
+                with open(file) as fp:
+                    f = json.load(fp)
+                rb = f"{extract}/" + f['file']
+                updater(f, m.parse(rb), file)
+
+
+def parse_arg(argv):
+ parser = argparse.ArgumentParser(prog='Brew-Converter', description='Converts homebrew formulas to `pwd`. Be sure to cd before using this tool.')
+ group = parser.add_mutually_exclusive_group()
+ # Args
+ group.add_argument('--all', help='Create and parse all brew formulas',
+ action='store_true', default=False, dest='all')
+ group.add_argument('--upgrade', help='Upgrade all formulas present',
+ action='store_true', default=False, dest='upgrade')
+    results = parser.parse_args(argv)
+    # Checking
+    if not argv:
+ parser.print_help()
+ sys.exit(1)
+ elif results.all:
+ convert(create=True)
+ elif results.upgrade:
+ convert()
+ else:
+ print(f"Something went wrong: {results}")
+ sys.exit(3)
+
+
+if __name__ == '__main__':
+ parse_arg(sys.argv[1:])