diff --git a/coverage_crawler/crawler.py b/coverage_crawler/crawler.py
index 2fd4641..6bd2fe0 100644
--- a/coverage_crawler/crawler.py
+++ b/coverage_crawler/crawler.py
@@ -290,4 +290,4 @@ def run(website):
 
     driver.quit()
 
-    return os.path.abspath(os.path.join(os.getcwd(), '{}/report'.format(data_folder)))
+    return os.path.abspath(os.path.join(os.getcwd(), data_folder))
diff --git a/coverage_crawler/github.py b/coverage_crawler/github.py
new file mode 100644
--- /dev/null
+++ b/coverage_crawler/github.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+import os
+import shutil
+import subprocess
+
+REPO_URL = 'https://github.com/rhcu/coverage-crawler-reports'
+
+
+def upload_to_github(reports_list, git_user_name, git_password):
+    """Publish coverage reports to the GitHub reports repository.
+
+    reports_list maps a website name to the path of its report folder.
+    The repository is cloned (if needed), wiped of previous content,
+    refilled with the new reports plus an index.html linking to each,
+    then committed and force-pushed to master.
+    """
+    # Clone the repository only if it doesn't exist yet.
+    if not os.path.isdir('coverage-crawler-reports'):
+        subprocess.run(['git', 'clone', REPO_URL])
+    os.chdir('coverage-crawler-reports')
+
+    # Remove the previous content, keeping dotfiles (.git etc.).
+    for entry in os.listdir(os.getcwd()):
+        if os.path.isdir(entry):
+            shutil.rmtree(entry)
+        elif not entry.startswith('.'):
+            os.remove(entry)
+    subprocess.run(['git', 'pull', REPO_URL, 'master'])
+
+    with open('index.html', 'w') as f:
+        f.write('<html><head><title>Reports</title></head><body>\n')
+        f.write('The list of available reports for websites:<br>\n')
+        for website, report in reports_list.items():
+            # Move the generated report folder into the repository.
+            shutil.move(report, os.getcwd())
+            name_of_folder = report.rsplit('/', 1)[-1]
+            # Link target AND visible text: the original format string had a
+            # single placeholder, so the website name argument was dropped.
+            f.write('<a href="{}">{}</a><br>\n'.format(
+                os.path.join(name_of_folder, 'report/index.html'), website))
+        f.write('</body></html>\n')
+
+    # The directory is already a git clone, so 'git init' is unnecessary;
+    # 'git add -A' also stages deletions, unlike the shell-glob "git add *".
+    subprocess.run(['git', 'add', '-A'])
+    subprocess.run(['git', 'commit', '-m', 'Coverage crawler reports upload'])
+    # NOTE(review): credentials embedded in the URL are visible in the process
+    # list and shell history; prefer a token or a git credential helper.
+    subprocess.run(['git', 'push', 'https://{}:{}@github.com/rhcu/coverage-crawler-reports'.format(git_user_name, git_password), 'master', '--force'])
diff --git a/run_crawler.py b/run_crawler.py
index b15cbc8..bbc28ed 100644
--- a/run_crawler.py
+++ b/run_crawler.py
@@ -1,7 +1,17 @@
 # -*- coding: utf-8 -*-
 
 from coverage_crawler import crawler
+from coverage_crawler import github
+
+reports = {}
 
 with open('websites.txt') as f:
-    for website in f:
-        report = crawler.run(website)
+    for line in f:
+        # Strip the trailing newline so report keys and crawler input
+        # are clean; skip blank lines in websites.txt.
+        website = line.strip()
+        if not website:
+            continue
+        report = crawler.run(website)
+        reports[website] = report
+
+github.upload_to_github(reports, 'GIT_ACCOUNT', 'GIT_PASSWORD')