
Revert "Conducto CI Integration (Replacing Travis CI) (#367)" (#372)

This reverts commit 8bc47dc118.
Due to unforeseen circumstances, Conducto no longer exists; reverting changes.
Branch: pull/382/head
Author: Chris Caron (4 years ago), committed by GitHub
commit e0c0b66ff0
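For reference, a revert commit like this one is normally produced with a single git command; a minimal sketch, using the short hash quoted in the message above:

    # create a new commit that undoes the changes introduced by 8bc47dc118
    git revert 8bc47dc118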
19 changed files (lines changed in parentheses):

  1. .conducto.cfg (7)
  2. .conducto/Dockerfile.el7 (34)
  3. .conducto/Dockerfile.el8 (32)
  4. .conducto/Dockerfile.py27 (4)
  5. .conducto/Dockerfile.py35 (4)
  6. .conducto/Dockerfile.py36 (4)
  7. .conducto/Dockerfile.py37 (4)
  8. .conducto/Dockerfile.py38 (4)
  9. .conducto/Dockerfile.py39 (4)
  10. .coveragerc (2)
  11. .travis.yml (47)
  12. README.md (1)
  13. codecov.yml (7)
  14. dev-requirements.txt (1)
  15. pipeline.py (201)
  16. setup.cfg (2)
  17. test/test_glib_plugin.py (3)
  18. test/test_gnome_plugin.py (3)
  19. tox.ini (104)

.conducto.cfg (7)

@@ -1,7 +0,0 @@
[.general]
name = Apprise
desc = Push Notifications that work with just about every platform!
init = pipeline.py
[pr common]
command = python pipeline.py all_checks

.conducto/Dockerfile.el7 (34)

@@ -1,34 +0,0 @@
# Base
FROM centos:7
ENV container docker
RUN ( \
cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*;\
rm -f /etc/systemd/system/*.wants/*;\
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*;\
rm -f /lib/systemd/system/anaconda.target.wants/*; \
echo "assumeyes=1" >> /etc/yum.conf; \
yum install -y epel-release; \
yum install -y rpm-build rpmlint python-pip python-virtualenv sudo rsync;
# RPM Build Structure Setup
ENV FLAVOR=rpmbuild OS=centos DIST=el7
RUN useradd builder -u 1000 -m -G users,wheel &>/dev/null && \
echo "builder ALL=(ALL:ALL) NOPASSWD:ALL" >> /etc/sudoers && \
echo "# macros" > /home/builder/.rpmmacros && \
echo "%_topdir /home/builder" >> /home/builder/.rpmmacros && \
echo "%_sourcedir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_builddir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_specdir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_rpmdir %{_topdir}/rpm" >> /home/builder/.rpmmacros && \
echo "%_srcrpmdir %{_topdir}/rpm" >> /home/builder/.rpmmacros && \
mkdir /home/builder/rpm
# The ronn package isn't easily available without connecting to custom
# repositories. Since EL7 runs Python v2.7 (soon to be no longer supported)
# we will emulate the ronn binary being present and not worry about it
RUN ln -snf /usr/bin/true /usr/bin/ronn

.conducto/Dockerfile.el8 (32)

@@ -1,32 +0,0 @@
# Base
FROM centos:8
ENV container docker
RUN ( \
cd /lib/systemd/system/sysinit.target.wants/; for i in *; do [ $i == \
systemd-tmpfiles-setup.service ] || rm -f $i; done); \
rm -f /lib/systemd/system/multi-user.target.wants/*;\
rm -f /etc/systemd/system/*.wants/*;\
rm -f /lib/systemd/system/local-fs.target.wants/*; \
rm -f /lib/systemd/system/sockets.target.wants/*udev*; \
rm -f /lib/systemd/system/sockets.target.wants/*initctl*; \
rm -f /lib/systemd/system/basic.target.wants/*;\
rm -f /lib/systemd/system/anaconda.target.wants/*; \
echo "assumeyes=1" >> /etc/yum.conf; \
dnf install -y epel-release; \
dnf install -y rpm-build rpmlint python3-pip python3-virtualenv rubygem-ronn \
dnf-plugins-core 'dnf-command(config-manager)' \
'dnf-command(builddep)' sudo rsync; \
dnf config-manager --set-enabled powertools;
# RPM Build Structure Setup
ENV FLAVOR=rpmbuild OS=centos DIST=el8
RUN useradd builder -u 1000 -m -G users,wheel &>/dev/null && \
echo "builder ALL=(ALL:ALL) NOPASSWD:ALL" >> /etc/sudoers && \
echo "# macros" > /home/builder/.rpmmacros && \
echo "%_topdir /home/builder" >> /home/builder/.rpmmacros && \
echo "%_sourcedir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_builddir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_specdir %{_topdir}/packaging/redhat" >> /home/builder/.rpmmacros && \
echo "%_rpmdir %{_topdir}/rpm" >> /home/builder/.rpmmacros && \
echo "%_srcrpmdir %{_topdir}/rpm" >> /home/builder/.rpmmacros && \
mkdir /home/builder/rpm

.conducto/Dockerfile.py27 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:2.7-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.conducto/Dockerfile.py35 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:3.5-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.conducto/Dockerfile.py36 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:3.6-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.conducto/Dockerfile.py37 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:3.7-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.conducto/Dockerfile.py38 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:3.8-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.conducto/Dockerfile.py39 (4)

@@ -1,4 +0,0 @@
# Base
FROM python:3.9-buster
RUN apt-get update && apt-get install -y libdbus-1-dev build-essential musl-dev
RUN pip install dbus-python

.coveragerc (2)

@@ -8,6 +8,8 @@ source =
[paths]
source =
    apprise
    .tox/*/lib/python*/site-packages/apprise
    .tox/pypy/site-packages/apprise

[report]
show_missing = True

.travis.yml (47)

@@ -0,0 +1,47 @@
language: python
dist: xenial

addons:
  apt:
    packages:
      - libdbus-1-dev

matrix:
  include:
    - python: "2.7"
      env: TOXENV=py27
    - python: "3.5"
      env: TOXENV=py35
    - python: "3.6"
      env: TOXENV=py36
    - python: "3.7"
      env: TOXENV=py37
    - python: "3.8"
      env: TOXENV=py38
    - python: "3.9-dev"
      env: TOXENV=py39
    - python: "pypy2.7-6.0"
      env: TOXENV=pypy
    - python: "pypy3.5-7.0"
      env: TOXENV=pypy3

install:
  - pip install babel
  - pip install .
  - pip install codecov
  - pip install -r dev-requirements.txt
  - pip install -r requirements.txt
  - if [[ $TRAVIS_PYTHON_VERSION != 'pypy'* ]]; then travis_retry pip install dbus-python; fi

# run tests
script:
  - tox

after_success:
  - tox -e coverage-report
  - codecov

notifications:
  email: false

README.md (1)

@@ -22,6 +22,7 @@ System Administrators and DevOps who wish to send a notification now no longer n
[![Follow](https://img.shields.io/twitter/follow/l2gnux)](https://twitter.com/l2gnux/)<br/>
[![Discord](https://img.shields.io/discord/558793703356104724.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/MMPeN2D)
[![Python](https://img.shields.io/pypi/pyversions/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/)
[![Build Status](https://travis-ci.org/caronc/apprise.svg?branch=master)](https://travis-ci.org/caronc/apprise)
[![CodeCov Status](https://codecov.io/github/caronc/apprise/branch/master/graph/badge.svg)](https://codecov.io/github/caronc/apprise)
[![PyPi](https://img.shields.io/pypi/dm/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/)

codecov.yml (7)

@@ -1,7 +0,0 @@
# https://app.codecov.io
coverage:
  status:
    project:
      default:
        target: 100%    # the required coverage value
        threshold: 1%   # the leniency in hitting the target

dev-requirements.txt (1)

@@ -3,6 +3,7 @@ flake8
mock
pytest
pytest-cov
tox
babel
#

pipeline.py (201)

@@ -1,201 +0,0 @@
# -*- coding: utf-8 -*-
# A Conducto Pipeline
# Visit https://www.conducto.com for more information.
import os
import conducto as co
from inspect import cleandoc


def all_checks(release=None) -> co.Serial:
    """
    Define our Full Conducto Pipeline
    """
    # Dockerfile Context
    context = '.'

    # Shared Pipeline Directory
    share = '/conducto/data/pipeline/apprise'

    # The directory the project can be found in within the containers
    repo = '/apprise'

    # Unit Testing
    dockerfiles = (
        # Define our Containers
        ("Python 3.9", os.path.join('.conducto', 'Dockerfile.py39')),
        ("Python 3.8", os.path.join('.conducto', 'Dockerfile.py38')),
        ("Python 3.7", os.path.join('.conducto', 'Dockerfile.py37')),
        ("Python 3.6", os.path.join('.conducto', 'Dockerfile.py36')),
        ("Python 3.5", os.path.join('.conducto', 'Dockerfile.py35')),
        ("Python 2.7", os.path.join('.conducto', 'Dockerfile.py27')),
    )

    # Package Testing
    pkg_dockerfiles = (
        # Define our Containers
        ("EL8 RPM", os.path.join('.conducto', 'Dockerfile.el8')),
        ("EL7 RPM", os.path.join('.conducto', 'Dockerfile.el7')),
    )

    # find generated coverage filename and store it in the pipeline
    coverage_template = cleandoc('''
        pip install -r requirements.txt -r dev-requirements.txt || exit 1
        mkdir --verbose -p {share}/coverage && \\
            coverage run --parallel -m pytest && \\
            find . -mindepth 1 -maxdepth 1 -type f \\
                -name '.coverage.*' \\
                -exec mv --verbose -- {{}} {share}/coverage \;''')

    # pull generated file from the pipeline and place it back into
    # our working directory
    coverage_report_template = cleandoc('''
        pip install coverage || exit 1
        find {share}/coverage -mindepth 1 -maxdepth 1 -type f \\
            -name '.coverage.*' \\
            -exec mv --verbose -- {{}} . \;
        coverage combine . && \\
            coverage report --ignore-errors --skip-covered --show-missing

        # Push our coverage report to codecov.io
        retry=3
        iter=0
        while [ $iter -lt $retry ]; do
            bash <(curl -s https://codecov.io/bash) -Z
            [ $? -eq 0 ] && break
            sleep 1s
            # loop to try again
            let iter+=1
        done
        ''')

    # RPM Packaging Templates (assumes we're building as the user 'builder')
    rpm_pkg_template = cleandoc('''
        # copy our environment over
        rsync -a ./ /home/builder/

        # Permissions
        chmod ug+rw -R /home/builder
        chown -R builder /home/builder

        # Advance to our build directory
        cd /home/builder

        # Prepare Virtual Environment
        PYTHON=python3
        VENV_CMD="$PYTHON -m venv"

        # Enterprise Linux 7 (Python v2.7.5) Support
        [ "$DIST" == "el7" ] && \\
            VENV_CMD=virtualenv && \\
            PYTHON=python2

        sudo -u builder \\
            $VENV_CMD . && . bin/activate && \\
            pip install coverage babel wheel markdown && \\
            $PYTHON setup.py extract_messages && \\
            $PYTHON setup.py sdist

        # Build Man Page
        sudo -u builder \\
            ronn --roff packaging/man/apprise.md

        # Prepare RPM Package
        find dist -type f -name '*.gz' \\
            -exec mv --verbose {{}} packaging/redhat/ \\;
        find packaging/man -type f -name '*.1' \\
            -exec mv --verbose {{}} packaging/redhat/ \\;

        # Build Source RPM Package
        sudo -u builder \\
            rpmbuild -bs packaging/redhat/python-apprise.spec || exit 1

        # Install Missing RPM Dependencies
        if [ -x /usr/bin/dnf ]; then
            # EL8 and Newer
            dnf builddep -y rpm/*.rpm || exit 1
        else
            # EL7 Backwards Compatibility
            yum-builddep -y rpm/*.rpm || exit 1
        fi

        # Build our RPM using the environment we prepared
        sudo -u builder \\
            rpmbuild -bb packaging/redhat/python-apprise.spec''') \
        .format(repo=repo)

    # Define our default image keyword argument defaults
    image_kwargs = {
        'copy_repo': True,
        'path_map': {'.': repo},
    }

    if release:
        # Prepare release details into environment
        image_kwargs.update({'env': {'PRODUCT_RELEASE': release}})

    # Our base image is always the first entry defined in our dockerfiles
    base_image = co.Image(
        dockerfile=dockerfiles[0][1], context=context, **image_kwargs)

    base_pkg_image = co.Image(
        dockerfile=pkg_dockerfiles[0][1], context=context, **image_kwargs)

    with co.Serial() as pipeline:
        with co.Parallel(name="Presentation"):
            # Code Styles
            co.Exec(
                'pip install flake8 && '
                'flake8 . --count --show-source --statistics',
                name="Style Guidelines", image=base_image)

            # RPM Checking
            co.Exec(
                cleandoc('''rpmlint --verbose -o "NetworkEnabled False" \\
                    packaging/redhat/python-apprise.spec'''),
                name="RPM Guidelines", image=base_pkg_image)

        with co.Parallel(name="Tests"):
            for entry in dockerfiles:
                name, dockerfile = entry

                # Prepare our Image
                image = co.Image(
                    dockerfile=dockerfile, context=context, **image_kwargs)

                # Unit Tests
                # These produce files that look like:
                # .coverage.{userid}.{hostname}.NNNNNN.NNNNNN where:
                #  - {userid} becomes the user that ran the test
                #  - {hostname} identifies the hostname it was built on
                #  - N gets replaced with a number
                # The idea here is that the .coverage.* file is unique
                # from others being built in other containers
                co.Exec(
                    coverage_template.format(share=share),
                    name="{} Coverage".format(name), image=image)

        # Coverage Reporting
        co.Exec(
            coverage_report_template.format(share=share),
            name="Test Code Coverage", image=base_image)

        with co.Parallel(name="Packaging"):
            for entry in pkg_dockerfiles:
                name, dockerfile = entry
                image = co.Image(
                    dockerfile=dockerfile, context=context, **image_kwargs)

                # Build our packages
                co.Exec(rpm_pkg_template, name=name, image=image)

    return pipeline


if __name__ == "__main__":
    """
    Execute our pipeline
    """
    co.main(default=all_checks)
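A brief usage note: per the .conducto.cfg shown earlier, this pipeline was driven through Conducto's own entry point (co.main). A minimal, historical-only sketch, since Conducto itself has since shut down:

    # run the full set of checks defined by all_checks()
    python pipeline.py all_checks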

setup.cfg (2)

@@ -7,7 +7,7 @@ license_file = LICENSE
[flake8]
# We exclude packages we don't maintain
exclude = .eggs
exclude = .eggs,.tox
ignore = E741,E722,W503,W504,W605
statistics = true
builtins = _

test/test_glib_plugin.py (3)

@@ -78,7 +78,8 @@ def test_dbus_plugin(mock_mainloop, mock_byte, mock_bytearray,
del sys.modules[gi_name]
reload(sys.modules['apprise.plugins.NotifyDBus'])
# Fake our dbus environment for testing purposes
# We need to fake our dbus environment for testing purposes since
# the gi library isn't available in Travis CI
gi = types.ModuleType(gi_name)
gi.repository = types.ModuleType(gi_name + '.repository')

test/test_gnome_plugin.py (3)

@@ -65,7 +65,8 @@ def test_gnome_plugin():
del sys.modules[gi_name]
reload(sys.modules['apprise.plugins.NotifyGnome'])
# Fake our Gnome environment for testing purposes
# We need to fake our gnome environment for testing purposes since
# the gi library isn't available in Travis CI
gi = types.ModuleType(gi_name)
gi.repository = types.ModuleType(gi_name + '.repository')
gi.module = types.ModuleType(gi_name + '.module')

tox.ini (104)

@@ -0,0 +1,104 @@
[tox]
envlist = py27,py35,py36,py37,py38,py39,pypy,pypy3,coverage-report

[testenv]
# Prevent random setuptools/pip breakages like
# https://github.com/pypa/setuptools/issues/1042 from breaking our builds.
setenv =
    VIRTUALENV_NO_DOWNLOAD=1
deps=
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py27]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py35]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py36]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py37]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py38]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:py39]
deps=
    dbus-python
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:pypy]
deps=
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
commands =
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:pypy3]
deps=
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/dev-requirements.txt
# Last supported cryptography version that can link against
# OpenSSL v1.0.2 (which pypy35 uses) is 3.1.1
commands =
    pip install --upgrade cryptography==3.1.1
    python setup.py compile_catalog
    coverage run --parallel -m pytest {posargs}
    flake8 . --count --show-source --statistics

[testenv:coverage-report]
deps = coverage
skip_install = true
commands=
    coverage combine
    coverage report
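A short usage sketch for the tox configuration above; the environment names come straight from the envlist, and tox's -e flag selects a single environment:

    # run the test suite under one interpreter, e.g. CPython 3.8
    tox -e py38

    # combine the parallel .coverage.* files and print the summary report
    tox -e coverage-report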