PROJECT=commonnet
APPS=commorganization commcrawler
PYTHON=python3

.PHONY: help makemessages compilemessages migrations migrate collectstatic \
        update fixture install_fixture default_import regenerate_all crawl \
        run generate_graphs

help: ## display this help
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n  make \033[36m<target>\033[0m\n\nTargets:\n"} /^[a-zA-Z_-]+:.*?##/ { printf "  \033[36m%-16s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST)

makemessages: ## make messages for translation
	for DIR in $(APPS); do \
		cd $(CURDIR)/$$DIR; \
		$(PYTHON) ../manage.py makemessages --all; \
	done

compilemessages: ## compile messages for translation
	for DIR in $(APPS); do \
		cd $(CURDIR)/$$DIR; \
		$(PYTHON) ../manage.py compilemessages; \
	done

migrations: ## make DB migrations
	$(PYTHON) manage.py makemigrations

migrate: ## apply DB migrations
	$(PYTHON) manage.py migrate

collectstatic: ## web - collect static files to serve
	$(PYTHON) manage.py collectstatic --no-input

update: collectstatic compilemessages migrate ## update static files, translations and DB schema
	@echo "OK"

fixture: ## create fixtures (excluded domains)
	$(PYTHON) manage.py dumpdata --indent 4 --natural-primary \
		commcrawler.ExludedDomains > fixtures/commcrawler.json

install_fixture: ## install fixtures (excluded domains)
	$(PYTHON) manage.py loaddata fixtures/commcrawler.json

default_import: ## import default data
	$(PYTHON) manage.py import_csv_communes data_src/communes.csv
	$(PYTHON) manage.py import_csv_autres data_src/autres.csv

regenerate_all: migrate default_import install_fixture ## regenerate the whole database
	$(PYTHON) manage.py createsuperuser

crawl: ## launch crawl on the first scheduled crawler
	$(PYTHON) manage.py launch_crawl --first-available > /dev/null

run: ## run test server
	$(PYTHON) manage.py runserver 0.0.0.0:8000

generate_graphs: ## generate graph model for documentation
	$(PYTHON) manage.py graph_models \
		-S --arrow-shape normal -g -n -L fr-fr $(APPS) > /tmp/$(PROJECT).dot
	dot -Tpng /tmp/$(PROJECT).dot > docs/images/models.png
	rm /tmp/$(PROJECT).dot
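
# Example bootstrap sequence (a sketch, assuming database settings are
# configured and the CSV sources exist under data_src/; regenerate_all
# prompts interactively for superuser credentials):
#   make regenerate_all   # migrate, import default data, load fixtures, create a superuser
#   make update           # collect static files, compile translations, migrate
#   make run              # serve on http://0.0.0.0:8000/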