initial commit
commit 9e8973e5ab

@@ -0,0 +1,84 @@
# Suitable for Dockerfile-based services
.PHONY: clean clean-test clean-pyc clean-build docs help common.mk
.DEFAULT_GOAL := help

NET_NAME := docker_net
SERVICE := docker_image
SERVER := $(patsubst %,%_server,${SERVICE})
PORT_MAP := -p 8000:8000/tcp
VALID_CMD := true
include ./common.mk

clean: clean-build clean-pyc clean-test clean-docker ## remove all build, test, coverage, Python and Docker artifacts

clean-build: ## remove build artifacts
	rm -fr build/
	rm -fr dist/
	rm -fr .eggs/
	find . -name '*.egg-info' -exec rm -fr {} +
	find . -name '*.egg' -exec rm -f {} +

clean-pyc: ## remove Python file artifacts
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -fr {} +

clean-test: ## remove test and coverage artifacts
	rm -fr .tox/
	rm -f .coverage
	rm -fr htmlcov/
	rm -fr .pytest_cache

clean-docker: teardown ## remove docker containers
	docker rm ${SERVER} || true
	# docker rmi ${SERVICE} || true

# install tooling needed to generate gRPC stubs
depends:
	pip install grpcio_tools
	$(MAKE) build-sf-binaries

build: clean ## builds the docker image
	# install the proto files in the right location
	docker build --build-arg AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} \
		--build-arg AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} \
		--build-arg AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION} \
		-t ${SERVICE} .. -f ${SERVER}.Dockerfile

inspect: ## look inside the docker container
	docker run -i -t --rm --network=${NET_NAME} --entrypoint /bin/bash --name ${SERVER} ${PORT_MAP} ${SERVICE}

logs: ## check docker container logs
	docker logs -f ${SERVER}

# dot-prefixed helper targets; hidden from the generated help output
.run:
	docker run -i -t --rm --network=${NET_NAME} --name ${SERVER} ${PORT_MAP} ${SERVICE}

.setup: clean-docker build dockernet

run: .setup .run ## runs the docker container in foreground

teardown:
	docker stop ${SERVER} || true

.deploy:
	docker run -d --network=${NET_NAME} --name ${SERVER} ${PORT_MAP} ${SERVICE}

deploy: .setup .deploy ## daemonize the docker container

.val: ## run the demo client
	$(VALID_CMD) || true

.wait:
	sleep 60

.val-setup:
	true

.val-teardown:
	true

val: .val-setup .val .val-teardown

validate: clean-docker deploy .wait .val-setup val .val-teardown ## validates response from the service end2end
	$(MAKE) teardown
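
Note: the .setup target above depends on a dockernet target that is not defined in any of the files shown in this commit, so make run and make deploy would stop with a "No rule to make target" error unless it is provided elsewhere. A minimal sketch of such a target, assuming it only needs to ensure the user-defined bridge network ${NET_NAME} exists:

# Hypothetical helper, not part of this commit: create the Docker network on demand.
dockernet:
	docker network inspect ${NET_NAME} >/dev/null 2>&1 || docker network create ${NET_NAME}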

@@ -0,0 +1,35 @@
# Suitable for a python monorepo with packages in subdirectories (contains project.mk)
.PHONY: _forward Makefile common.mk
.DEFAULT_GOAL := help

include common.mk

SUBPKGS := $(patsubst %setup.py,%,$(wildcard */setup.py))
$(warning SUBPKGS is $(SUBPKGS))
%: _forward
	$(foreach t,$(SUBPKGS),$(MAKE) -C $t $@;)

check: ## lint project using pre-commit hooks installed for git
	pre-commit run --all-files

setup:
	python -m nltk.downloader stopwords
	rm -rf logs
	mkdir logs

install-reqs:
	pip install -r requirements.txt

install-dev-reqs:
	pip install -r requirements_dev.txt

install: uninstall pypi install-reqs setup ## installs the requirements and downloads components

uninstall: ## uninstalls the pip dependencies
	pip uninstall -y -r requirements.txt

deploy: ## deploys the services by starting supervisord
	supervisord

develop: uninstall pypi install-dev-reqs setup ## installs the requirements and sets up development hooks
	pre-commit install
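
Note: the match-anything rule above forwards any target not defined in this Makefile to every sub-package directory found via */setup.py. Neither _forward nor the pypi target referenced by install and develop has a rule in the files shown here; they presumably live in project.mk or another shared include. If _forward is in fact missing, the conventional fix is an empty rule, sketched below under that assumption:

# Hypothetical, not part of this commit: empty rule so the "%: _forward" pattern can fire.
_forward: ;

With that in place, running e.g. make lint at the root expands the foreach to roughly "make -C sia/ lint;" for each sub-package, where sia is the package referenced by the package-level Makefile later in this commit.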

@@ -0,0 +1,33 @@
# Contains an automatic help generator based on the ## comments on targets,
# and other utilities

define BROWSER_PYSCRIPT
import os, webbrowser, sys

try:
	from urllib import pathname2url
except:
	from urllib.request import pathname2url

webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
endef
export BROWSER_PYSCRIPT

define PRINT_HELP_PYSCRIPT
import re, sys

for line in sys.stdin:
	match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
	if match:
		target, help = match.groups()
		print("%-20s %s" % (target, help))
endef
export PRINT_HELP_PYSCRIPT

BROWSER := python -c "$$BROWSER_PYSCRIPT"

help: ## show this help; make TARGET forwards the TARGET to sub packages
	@cat $(MAKEFILE_LIST) | python -c "$$PRINT_HELP_PYSCRIPT"
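
Usage note, not part of the committed files: PRINT_HELP_PYSCRIPT reads every makefile in MAKEFILE_LIST and prints one line per target that carries a "## description" comment, formatted with "%-20s %s". In the Dockerfile-based service Makefile above, make help would therefore print roughly:

clean                remove all build, test, coverage, Python and Docker artifacts
clean-build          remove build artifacts
build                builds the docker image
...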

@@ -0,0 +1,63 @@
# Suitable for python packages in subdirectories of monorepo root containing Makefile.python
.PHONY: clean clean-test clean-pyc clean-build docs help common.mk
.DEFAULT_GOAL := help

include ../common.mk

clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts

clean-build: ## remove build artifacts
	rm -fr build/
	rm -fr dist/
	rm -fr .eggs/
	find . -name '*.egg-info' -exec rm -fr {} +
	find . -name '*.egg' -exec rm -f {} +

clean-pyc: ## remove Python file artifacts
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -fr {} +

clean-test: ## remove test and coverage artifacts
	rm -fr .tox/
	rm -f .coverage
	rm -fr htmlcov/
	rm -fr .pytest_cache

lint: ## check style with flake8
	flake8 sia tests

test: ## run tests quickly with the default Python
	python setup.py test

test-all: ## run tests on every Python version with tox
	tox

coverage: ## check code coverage quickly with the default Python
	coverage run --source sia -m pytest
	coverage report -m
	coverage html
	$(BROWSER) htmlcov/index.html

docs: ## generate Sphinx HTML documentation, including API docs
	rm -f docs/sia.rst
	rm -f docs/modules.rst
	sphinx-apidoc -o docs/ sia
	$(MAKE) -C docs clean
	$(MAKE) -C docs html
	$(BROWSER) docs/_build/html/index.html

servedocs: docs ## compile the docs watching for changes
	watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .

release: dist pypi ## package and upload a release to local pypi server
	twine upload -u "" -p "" --repository-url http://localhost:8080 dist/*

dist: clean ## builds source and wheel package
	python setup.py sdist
	python setup.py bdist_wheel
	ls -l dist

install: clean pypi ## install the package to the active Python's site-packages
	python setup.py install