# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Remove existing heroku client files
existing_heroku_directory_names = glob.glob(os.path.join(tmp_dir, 'heroku-cli-*'))
if len(existing_heroku_directory_names) == 0:
if os.path.exists(os.path.join(tmp_dir, 'heroku.tar.gz')):
os.remove(os.path.join(tmp_dir, 'heroku.tar.gz'))
# Get the heroku client and unzip
os.chdir(tmp_dir)
sh.wget(
shlex.split(
'{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
heroku_url, os_name, bit_architecture
)
)
)
sh.tar(shlex.split('-xvzf heroku.tar.gz'))
heroku_directory_name = glob.glob(os.path.join(tmp_dir, 'heroku-cli-*'))[0]
heroku_directory_path = os.path.join(tmp_dir, heroku_directory_name)
heroku_executable_path = os.path.join(heroku_directory_path, 'bin', 'heroku')
server_source_directory_path = os.path.join(
parent_dir, legacy_server_source_directory_name
)
heroku_server_directory_path = os.path.join(
tmp_dir, '{}_{}'.format(heroku_server_directory_name, task_name)
)
# Delete old server files
sh.rm(shlex.split('-rf ' + heroku_server_directory_path))
# Copy over a clean copy into the server directory
return str(e), 400
try:
depends_path = download_depends(package_info['depends'], package_info['type'], clone_path)
except sh.ErrorReturnCode as e:
return 'Unable to install dependencies. %s' % e, 503
# remove info.yaml from tar.gz
with open(clone_path + '/.gitattributes', 'w') as f:
f.write('info.yaml export-ignore')
try:
logger.debug("Packing application to tar.gz")
sh.git("archive", ref, "--worktree-attributes", format="tar", o="app.tar", _cwd=clone_path),
if package_info["type"] == "nodejs":
sh.tar("-uf", "app.tar", "node_modules", _cwd=clone_path)
elif package_info["type"] == "python":
sh.tar("-uf", "app.tar", "-C", clone_path + "/depends", *depends_path, _cwd=clone_path)
sh.gzip("app.tar", _cwd=clone_path)
package_files = sh.tar('-tf', 'app.tar.gz', _cwd=clone_path)
package_info['structure'] = [f.strip() for f in package_files]
except sh.ErrorReturnCode as e:
return 'Unable to pack application. %s' % e, 503
try:
for line in sh.git("log", "-5", date="short", format="%h %ad %s [%an]", _cwd=clone_path):
line = line.strip()
# git log output is using ansi terminal codes which is messy for our purposes
ansisequence = re.compile(r'\x1B\[[^A-Za-z]*[A-Za-z]')
line = ansisequence.sub('', line)
line = line.strip("\x1b=\r")
os.path.join(parent_dir, 'heroku-cli-*')
)
if len(existing_heroku_directory_names) == 0:
if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))
# Get the heroku client and unzip
os.chdir(parent_dir)
sh.wget(
shlex.split(
'{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
heroku_url, os_name, bit_architecture
)
)
)
sh.tar(shlex.split('-xvzf heroku.tar.gz'))
heroku_directory_name = glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))[0]
heroku_directory_path = os.path.join(parent_dir, heroku_directory_name)
heroku_executable_path = os.path.join(heroku_directory_path, 'bin', 'heroku')
server_source_directory_path = os.path.join(
parent_dir, server_source_directory_name
)
heroku_server_directory_path = os.path.join(
parent_dir, '{}_{}'.format(heroku_server_directory_name, task_name)
)
# Delete old server files
sh.rm(shlex.split('-rf ' + heroku_server_directory_path))
# Copy over a clean copy into the server directory
pass # return code 1 means unzipping had
# warnings but did complete,
# apparently happens sometimes with
# github zips
import zipfile
fileh = zipfile.ZipFile(extraction_filename, 'r')
root_directory = fileh.filelist[0].filename.split('/')[0]
if root_directory != basename(directory_name):
shprint(sh.mv, root_directory, directory_name)
elif (extraction_filename.endswith('.tar.gz') or
extraction_filename.endswith('.tgz') or
extraction_filename.endswith('.tar.bz2') or
extraction_filename.endswith('.tbz2') or
extraction_filename.endswith('.tar.xz') or
extraction_filename.endswith('.txz')):
sh.tar('xf', extraction_filename)
root_directory = shprint(
sh.tar, 'tf', extraction_filename).stdout.decode(
'utf-8').split('\n')[0].split('/')[0]
if root_directory != directory_name:
shprint(sh.mv, root_directory, directory_name)
else:
raise Exception(
'Could not extract {} download, it must be .zip, '
'.tar.gz or .tar.bz2 or .tar.xz'.format(extraction_filename))
elif isdir(extraction_filename):
mkdir(directory_name)
for entry in listdir(extraction_filename):
if entry not in ('.git',):
shprint(sh.cp, '-Rv',
join(extraction_filename, entry),
directory_name)
def extract_reviews(input_file_name, output_file_name):
# Unpack the sentiment-review archive and collect labelled reviews per category.
# NOTE(review): output_file_name is never used in the visible span — the
# function presumably writes `reviews` out further down; this fragment looks
# truncated, so confirm against the full source before relying on it.
# extracts to folder "sorted_data"
sh.tar("xvf", input_file_name)
reviews = []
raw_dir = "sorted_data"
# Each immediate subdirectory of sorted_data is one product category.
categories = [name for name in os.listdir(raw_dir) if os.path.isdir(os.path.join(raw_dir, name))]
for category in categories:
positive_file_name = os.path.join(raw_dir, category, "positive.review")
negative_file_name = os.path.join(raw_dir, category, "negative.review")
# Positive reviews are labelled ':)', negative ones ':('.
# NOTE(review): 'categroy' looks like a typo for 'category', but
# get_reviews is defined elsewhere and may expect this exact keyword —
# confirm its signature before renaming.
positive_reviews = get_reviews(positive_file_name, label=':)', categroy=category)
negative_reviews = get_reviews(negative_file_name, label=':(', categroy=category)
reviews.extend(positive_reviews)
reviews.extend(negative_reviews)
# This folder is really big, and we still have the compressed version of this anyway so there's
# no need to keep it around.
def backup_unpack(self):
    """ Unpack the ChordsBackup created backup file to a temporary directoy.

    Extracts ``self.dump_file`` (a gzipped tar archive) into ``self.tmp_dir``
    via the external ``tar`` command, echoing tar's transcript, then runs
    ``self.file_check()`` to validate the extracted files.

    Raises:
        ChordsLoadError: if the ``tar`` invocation fails for any reason.
    """
    print("*** Unpacking chords files ***")
    print("Temporary directory: " + self.tmp_dir)
    try:
        # -C extracts into the temp dir; _err_to_out merges stderr into
        # stdout so the full tar transcript is printed below.
        print(
            sh.tar('-xzvf', self.dump_file, '-C', self.tmp_dir, _err_to_out=True).stdout
        )
    except Exception as sh_err:
        # Chain the original error so the tar failure details stay in the
        # traceback instead of being silently discarded.
        raise ChordsLoadError(sh_err) from sh_err
    self.file_check()
def _decompress_archive(self, archive_path):
    """
    Decompress the given archive into the S2E environment's projects
    directory.

    Args:
        archive_path: Path to a tar.xz archive to extract.

    Raises:
        CommandError: if the ``tar`` invocation fails.
    """
    try:
        logger.info('Decompressing archive %s', archive_path)
        # Equivalent to: tar --extract --xz --verbose --file <archive_path>
        # --directory <projects dir>, streaming tar's output to our stdio.
        tar(extract=True, xz=True, verbose=True, file=archive_path,
            directory=self.projects_path(), _out=sys.stdout,
            _err=sys.stderr)
    except ErrorReturnCode as e:
        # Chain the original sh error so tar's exit status and output are
        # preserved in the traceback.
        raise CommandError('Failed to decompress project archive - %s' % e) from e
version = str(git.describe("--abbrev=0", "--tags")).strip()
git.checkout(version)
# run pyinstaller
sh.mkdir("-p", "./scripts/release")
sh.cp("../../toolkit/stackhut.py", "./")
sh.cp("../../toolkit/scripts/release/stackhut_lin.spec", "./scripts/release/")
pyinstaller('-y', '--clean', "./scripts/release/stackhut_lin.spec")
# TODO - run tests on bin version...
# build tarball
rel_name = "stackhut-{}-linux-x86_64.txz".format(version)
os.chdir("dist")
sh.tar("-cJf", rel_name, "./stackhut")
# upload to github releases
header = {'Accept': 'application/vnd.github.v3+json'}
# get the release info
r = requests.get("https://api.github.com/repos/StackHut/stackhut-toolkit/releases/tags/{}".format(version),
headers=header)
if r.status_code == requests.codes.ok:
upload_url = r.json()["upload_url"]
else:
r.raise_for_status()
# upload the asset
with open(rel_name, 'rb') as rel_file:
upload_url = expand(upload_url, {"name": rel_name})
def archiveDownload(url, destination, archiveType):
    """Download an archive from *url*, extract it, and delete the archive.

    Args:
        url: URL of the archive file to fetch.
        destination: NOTE(review): only used in the initial log message —
            extraction happens in the current working directory; confirm
            against callers whether this is intentional.
        archiveType: Either 'zip' or 'tar.gz'.

    Raises:
        ValueError: if *archiveType* is not a supported archive type.
    """
    logging.info('Now downloading archive file from URL %s to %s' % (url, destination))
    filename = wget.download(url)
    if archiveType == 'zip':
        # Bug fix: the original passed filename as a %-format argument to a
        # message with no placeholder, so it was dropped from the log output.
        logging.info('Unzipping zip file from: %s', filename)
        sh.unzip(filename)
    elif archiveType == 'tar.gz':
        logging.info('Untarring tar.gz file from: %s', filename)
        sh.tar('-xvzf', filename)
    else:
        # Previously an unknown type fell through silently and the download
        # was deleted without ever being extracted; fail loudly instead and
        # keep the file for inspection.
        raise ValueError('Unsupported archive type: %r' % (archiveType,))
    logging.info('Removing archive file.')
    sh.rm(filename)