Compare commits

7 Commits

| Author | SHA1 | Date |
|---|---|---|
| | fd867a5ab4 | |
| | 5377fa4347 | |
| | 44c344cd1f | |
| | 96a75eb4bb | |
| | 000625c727 | |
| | d07dd3208d | |
| | c79aff9b4f | |
@@ -1,16 +1,18 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="docker-compose up django" type="docker-deploy" factoryName="docker-compose.yml" server-name="Docker">
  <configuration default="false" name="Local" type="docker-deploy" factoryName="docker-compose.yml" server-name="Docker (local)">
    <deployment type="docker-compose.yml">
      <settings>
        <option name="envFilePath" value="" />
        <option name="services">
          <list>
            <option value="django" />
            <option value="postgres" />
            <option value="nginx-proxy" />
          </list>
        </option>
        <option name="sourceFilePath" value="nas.yml" />
        <option name="sourceFilePath" value="local.yml" />
      </settings>
    </deployment>
    <method v="2" />
  </configuration>
</component>
</component>
.idea/runConfigurations/migrate.xml (generated, 19 lines changed)

@@ -12,13 +12,16 @@
  <option name="IS_MODULE_SDK" value="true" />
  <option name="ADD_CONTENT_ROOTS" value="true" />
  <option name="ADD_SOURCE_ROOTS" value="true" />
  <PathMappingSettings>
    <option name="pathMappings">
      <list>
        <mapping local-root="$PROJECT_DIR$" remote-root="/app" />
      </list>
    </option>
  </PathMappingSettings>
  <EXTENSION ID="net.ashald.envfile">
    <option name="IS_ENABLED" value="false" />
    <option name="IS_SUBST" value="false" />
    <option name="IS_PATH_MACRO_SUPPORTED" value="false" />
    <option name="IS_IGNORE_MISSING_FILES" value="false" />
    <option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
    <ENTRIES>
      <ENTRY IS_ENABLED="true" PARSER="runconfig" />
    </ENTRIES>
  </EXTENSION>
  <option name="launchJavascriptDebuger" value="false" />
  <option name="host" value="" />
  <option name="additionalOptions" value="" />
@@ -27,6 +30,6 @@
  <option name="runNoReload" value="false" />
  <option name="useCustomRunCommand" value="true" />
  <option name="customRunCommand" value="migrate" />
  <method />
  <method v="2" />
</configuration>
</component>
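This run configuration enables `useCustomRunCommand` with `customRunCommand` set to `migrate`, so it runs Django's migrate management command against the `/app` path mapped above. As a hedged illustration only (none of this code is in the change set), the same command can be invoked programmatically; the settings module name is borrowed from `docs/conf.py` further down and may not be what the run configuration actually uses:

```python
# Illustrative sketch, not part of the diff: what the "migrate" custom run
# command amounts to when invoked from Python.
import os

import django
from django.core.management import call_command

# Assumed settings module (it appears in docs/conf.py below); adjust as needed.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
django.setup()
call_command("migrate")  # equivalent to `python manage.py migrate`
```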
@@ -12,6 +12,8 @@ ARG BUILD_ENVIRONMENT=local
RUN apt-get update && apt-get install --no-install-recommends -y \
  # dependencies for building Python packages
  build-essential \
  # git
  git \
  # psycopg2 dependencies
  libpq-dev

@@ -41,6 +43,8 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
  libpq-dev \
  # Translations dependencies
  gettext \
  # git for submodules
  git \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*
@@ -6,4 +6,4 @@ set -o nounset

python manage.py migrate
python manage.py runserver_plus 0.0.0.0:8000
python manage.py runserver 0.0.0.0:8000
@@ -1,64 +0,0 @@
ARG PYTHON_VERSION=3.9-slim-bullseye

# define an alias for the specfic python version used in this file.
FROM python:${PYTHON_VERSION} as python


# Python build stage
FROM python as python-build-stage

ENV PYTHONDONTWRITEBYTECODE 1

RUN apt-get update && apt-get install --no-install-recommends -y \
  # dependencies for building Python packages
  build-essential \
  # psycopg2 dependencies
  libpq-dev \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*

# Requirements are installed here to ensure they will be cached.
COPY ./requirements /requirements

# create python dependency wheels
RUN pip wheel --no-cache-dir --no-deps --wheel-dir /usr/src/app/wheels \
  -r /requirements/local.txt -r /requirements/production.txt \
  && rm -rf /requirements


# Python 'run' stage
FROM python as python-run-stage

ARG BUILD_ENVIRONMENT
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1

RUN apt-get update && apt-get install --no-install-recommends -y \
  # To run the Makefile
  make \
  # psycopg2 dependencies
  libpq-dev \
  # Translations dependencies
  gettext \
  # Uncomment below lines to enable Sphinx output to latex and pdf
  # texlive-latex-recommended \
  # texlive-fonts-recommended \
  # texlive-latex-extra \
  # latexmk \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*

# copy python dependency wheels from python-build-stage
COPY --from=python-build-stage /usr/src/app/wheels /wheels

# use wheels to install python dependencies
RUN pip install --no-cache /wheels/* \
  && rm -rf /wheels

COPY ./compose/local/docs/start /start-docs
RUN sed -i 's/\r$//g' /start-docs
RUN chmod +x /start-docs

WORKDIR /docs
@@ -1,7 +0,0 @@
#!/bin/bash

set -o errexit
set -o pipefail
set -o nounset

make livehtml
@@ -14,6 +14,8 @@ ARG BUILD_ENVIRONMENT=production
RUN apt-get update && apt-get install --no-install-recommends -y \
  # dependencies for building Python packages
  build-essential \
  # git for submodules
  git \
  # psycopg2 dependencies
  libpq-dev

@@ -47,6 +49,8 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
  libpq-dev \
  # Translations dependencies
  gettext \
  # git for submodules
  git \
  # cleaning up unused files
  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
  && rm -rf /var/lib/apt/lists/*
@@ -1,29 +0,0 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = ./_build
APP = /app

.PHONY: help livehtml apidocs Makefile

# Put it first so that "make" without argument is like "make help".
help:
    @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -c .

# Build, watch and serve docs with live reload
livehtml:
    sphinx-autobuild -b html --host 0.0.0.0 --port 9000 --watch $(APP) -c . $(SOURCEDIR) $(BUILDDIR)/html

# Outputs rst files from django application code
apidocs:
    sphinx-apidoc -o $(SOURCEDIR)/api $(APP)

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
    @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -c .
@@ -1 +0,0 @@
# Included so that Django's startproject comment runs against the docs directory
docs/conf.py (63 lines changed)

@@ -1,63 +0,0 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.

import os
import sys

import django

if os.getenv("READTHEDOCS", default=False) == "True":
    sys.path.insert(0, os.path.abspath(".."))
    os.environ["DJANGO_READ_DOT_ENV_FILE"] = "True"
    os.environ["USE_DOCKER"] = "no"
else:
    sys.path.insert(0, os.path.abspath("/app"))
os.environ["DATABASE_URL"] = "sqlite:///readthedocs.db"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
django.setup()

# -- Project information -----------------------------------------------------

project = "BenchCoach"
copyright = """2022, Anthony Correa"""
author = "Anthony Correa"


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
]

# Add any paths that contain templates here, relative to this directory.
# templates_path = ["_templates"]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
@@ -1,38 +0,0 @@
How To - Project Documentation
======================================================================

Get Started
----------------------------------------------------------------------

Documentation can be written as rst files in `benchcoach/docs`.

To build and serve docs, use the commands::

    docker-compose -f local.yml up docs

Changes to files in `docs/_source` will be picked up and reloaded automatically.

`Sphinx <https://www.sphinx-doc.org/>`_ is the tool used to build documentation.

Docstrings to Documentation
----------------------------------------------------------------------

The sphinx extension `apidoc <https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html/>`_ is used to automatically document code using signatures and docstrings.

Numpy or Google style docstrings will be picked up from project files and available for documentation. See the `Napoleon <https://sphinxcontrib-napoleon.readthedocs.io/en/latest/>`_ extension for details.

For an in-use example, see the `page source <_sources/users.rst.txt>`_ for :ref:`users`.

To compile all docstrings automatically into documentation source files, use the command:
::

    make apidocs

This can be done in the docker container:
::

    docker run --rm docs make apidocs
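For context on the apidoc/Napoleon workflow this deleted page describes: Napoleon converts Google or NumPy style docstrings into reStructuredText during the autodoc pass. A generic example of the kind of docstring it would pick up (illustrative only, not taken from the benchcoach code):

```python
def innings_pitched(outs: int) -> float:
    """Convert recorded outs into innings pitched.

    Args:
        outs: Total number of outs recorded by the pitcher.

    Returns:
        Innings pitched in the conventional x.y notation, where .1 and .2
        represent one and two outs of a partial inning.
    """
    whole, part = divmod(outs, 3)
    return whole + part / 10
```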
@@ -1,24 +0,0 @@
.. BenchCoach documentation master file, created by
   sphinx-quickstart.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to BenchCoach's documentation!
======================================================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:

   howto
   pycharm/configuration
   users


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@@ -1,46 +0,0 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
    set SPHINXBUILD=sphinx-build -c .
)
set SOURCEDIR=_source
set BUILDDIR=_build
set APP=..\benchcoach

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
    echo.
    echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
    echo.installed, then set the SPHINXBUILD environment variable to point
    echo.to the full path of the 'sphinx-build' executable. Alternatively you
    echo.may add the Sphinx directory to PATH.
    echo.
    echo.Install sphinx-autobuild for live serving.
    echo.If you don't have Sphinx installed, grab it from
    echo.http://sphinx-doc.org/
    exit /b 1
)

%SPHINXBUILD% -b %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:livehtml
sphinx-autobuild -b html --open-browser -p 9000 --watch %APP% -c . %SOURCEDIR% %BUILDDIR%/html
GOTO :EOF

:apidocs
sphinx-apidoc -o %SOURCEDIR%/api %APP%
GOTO :EOF

:help
%SPHINXBUILD% -b help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
@@ -1,70 +0,0 @@
Docker Remote Debugging
=======================

To connect to python remote interpreter inside docker, you have to make sure first, that Pycharm is aware of your docker.

Go to *Settings > Build, Execution, Deployment > Docker*. If you are on linux, you can use docker directly using its socket `unix:///var/run/docker.sock`, if you are on Windows or Mac, make sure that you have docker-machine installed, then you can simply *Import credentials from Docker Machine*.

.. image:: images/1.png

Configure Remote Python Interpreter
-----------------------------------

This repository comes with already prepared "Run/Debug Configurations" for docker.

.. image:: images/2.png

But as you can see, at the beginning there is something wrong with them. They have red X on django icon, and they cannot be used, without configuring remote python interpreter. To do that, you have to go to *Settings > Build, Execution, Deployment* first.

Next, you have to add new remote python interpreter, based on already tested deployment settings. Go to *Settings > Project > Project Interpreter*. Click on the cog icon, and click *Add Remote*.

.. image:: images/3.png

Switch to *Docker Compose* and select `local.yml` file from directory of your project, next set *Service name* to `django`

.. image:: images/4.png

Having that, click *OK*. Close *Settings* panel, and wait few seconds...

.. image:: images/7.png

After few seconds, all *Run/Debug Configurations* should be ready to use.

.. image:: images/8.png

**Things you can do with provided configuration**:

* run and debug python code

.. image:: images/f1.png

* run and debug tests

.. image:: images/f2.png
.. image:: images/f3.png

* run and debug migrations or different django management commands

.. image:: images/f4.png

* and many others..

Known issues
------------

* Pycharm hangs on "Connecting to Debugger"

.. image:: images/issue1.png

This might be fault of your firewall. Take a look on this ticket - https://youtrack.jetbrains.com/issue/PY-18913

* Modified files in `.idea` directory

Most of the files from `.idea/` were added to `.gitignore` with a few exceptions, which were made, to provide "ready to go" configuration. After adding remote interpreter some of these files are altered by PyCharm:

.. image:: images/issue2.png

In theory you can remove them from repository, but then, other people will lose a ability to initialize a project from provided configurations as you did. To get rid of this annoying state, you can run command::

    $ git update-index --assume-unchanged benchcoach.iml
Deleted images (sizes before removal; width and height not captured): 66 KiB, 15 KiB, 177 KiB, 110 KiB, 6.1 KiB, 19 KiB, 249 KiB, 229 KiB, 230 KiB, 222 KiB, 42 KiB, 11 KiB.
@@ -1,15 +0,0 @@
.. _users:

Users
======================================================================

Starting a new project, it’s highly recommended to set up a custom user model,
even if the default User model is sufficient for you.

This model behaves identically to the default user model,
but you’ll be able to customize it in the future if the need arises.

.. automodule:: benchcoach.users.models
   :members:
   :noindex:
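The deleted page restates the standard Django advice of starting with a custom user model. A generic sketch of that pattern, shown for context only (the names are illustrative; the real `benchcoach.users.models.User` may differ):

```python
# Generic custom-user pattern: identical to Django's default user today,
# but extensible later without a painful migration.
from django.contrib.auth.models import AbstractUser


class User(AbstractUser):
    """Custom user model that starts out identical to Django's default."""


# settings.py would then point at it:
# AUTH_USER_MODEL = "users.User"
```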
@@ -133,7 +133,10 @@ def gen_image(

    # First line: Date
    font = ImageFont.truetype(font_regular_path, 62)
    text = f"{date:%a, %B %-d %-I:%M %p}".upper()
    if len(f"{date:%B}") <= 4:
        text = f"{date:%a, %B %-d %-I:%M %p}".upper()
    else:
        text = f"{date:%a, %b %-d %-I:%M %p}".upper()
    # text = date
    loc = (1050, 280)
    section_info_draw.text(loc, text, (14, 42, 28), font=font, anchor="ra")
@@ -267,7 +270,10 @@
    )

    # Second line: Date
    text = f"{date:%a, %B %-d %-I:%M %p}".upper()
    if len(f"{date:%B}") <= 4:
        text = f"{date:%a, %B %-d %-I:%M %p}".upper()
    else:
        text = f"{date:%a, %b %-d %-I:%M %p}".upper()
    # text = date
    font = ImageFont.truetype(font_condensed_path, 34)
    loc = (1050, 355)
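Both hunks above add the same rule: keep the full month name (`%B`) when it is four characters or fewer, otherwise fall back to the abbreviated `%b`. A small standalone sketch of that rule (the helper name is mine, and the zero-padding-free `%-d`/`%-I` directives are glibc/macOS specific):

```python
from datetime import datetime


def format_event_date(date: datetime) -> str:
    # Short month names (May, June, July) fit in full; longer ones are abbreviated.
    if len(f"{date:%B}") <= 4:
        text = f"{date:%a, %B %-d %-I:%M %p}"
    else:
        text = f"{date:%a, %b %-d %-I:%M %p}"
    return text.upper()


print(format_event_date(datetime(2022, 6, 4, 18, 30)))   # SAT, JUNE 4 6:30 PM
print(format_event_date(datetime(2022, 9, 11, 13, 5)))   # SUN, SEP 11 1:05 PM
```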
@@ -107,7 +107,7 @@ def get_matchup_image(request, team_id, event_id, dimensions=None, background=No
        image = gen_image(
            **game_info, background=BACKGROUND, width=width, height=height
        )
    elif game_info["runs_for"] and game_info["runs_against"]:
    elif game_info["runs_for"] or game_info["runs_against"]:
        image = gen_results_image(
            **game_info, background=BACKGROUND, width=width, height=height
        )
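The only change in this hunk is `and` becoming `or`, so a game with just one of the two run totals recorded now takes the results-image branch instead of falling through. A quick truth-table check with hypothetical `game_info` values (not taken from the codebase):

```python
cases = [
    {"runs_for": 5, "runs_against": 3},
    {"runs_for": 5, "runs_against": None},   # only one score recorded
    {"runs_for": None, "runs_against": None},
]
for game_info in cases:
    old = bool(game_info["runs_for"] and game_info["runs_against"])
    new = bool(game_info["runs_for"] or game_info["runs_against"])
    print(game_info, "and:", old, "or:", new)
# Only the middle case changes: False under `and`, True under `or`.
```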
linode.yml (86 lines changed)

@@ -1,86 +0,0 @@
version: '3'

volumes:
  benchcoach_local_postgres_data: {}
  benchcoach_local_postgres_data_backups: {}
  certs: {}
  vhost: {}
  html: {}
  acme: {}

services:
  django:
    build:
      context: .
      dockerfile: ./compose/linode/django/Dockerfile
    image: benchcoach_local_django
    container_name: benchcoach_local_django
    platform: linux/x86_64
    depends_on:
      - postgres
    volumes:
      - /root/teamsnap-benchcoach:/app:z
    env_file:
      - ./.envs/.linode/.django
      - ./.envs/.linode/.postgres
    ports:
      - "8000:8000"
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: benchcoach_production_postgres
    container_name: benchcoach_local_postgres
    volumes:
      - benchcoach_local_postgres_data:/var/lib/postgresql/data:Z
      - benchcoach_local_postgres_data_backups:/backups:z
    env_file:
      - ./.envs/.linode/.postgres

  docs:
    image: benchcoach_local_docs
    container_name: benchcoach_local_docs
    platform: linux/x86_64
    build:
      context: .
      dockerfile: ./compose/linode/docs/Dockerfile
    env_file:
      - ./.envs/.linode/.django
    volumes:
      - /root/teamsnap-benchcoach/docs:/docs:z
      - /root/teamsnap-benchcoach/config:/app/config:z
      - /root/teamsnap-benchcoach/benchcoach:/app/benchcoach:z
    ports:
      - "9000:9000"
    command: /start-docs

  nginx-proxy:
    image: jwilder/nginx-proxy:alpine
    container_name: nginx-proxy
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - /var/run/docker.sock:/tmp/docker.sock:ro
      - certs:/etc/nginx/certs
      - vhost:/etc/nginx/vhost.d
      - html:/usr/share/nginx/html
    env_file:
      - ./.envs/.linode/.nginx-proxy
    restart: always
    depends_on:
      - django

  nginx-proxy-acme:
    image: nginxproxy/acme-companion
    container_name: nginx-proxy-acme
    volumes_from:
      - nginx-proxy
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - certs:/etc/nginx/certs:rw
      - acme:/etc/acme.sh
    env_file:
      - ./.envs/.linode/.nginx-proxy-acme
local.yml (29 lines changed)

@@ -19,39 +19,22 @@ services:
    env_file:
      - ./.envs/.local/.django
      - ./.envs/.local/.postgres
    ports:
      - "8000:8000"
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: benchcoach_production_postgres
    image: benchcoach_local_postgres
    container_name: benchcoach_local_postgres
    ports:
      - "5432:5432"
    volumes:
      - benchcoach_local_postgres_data:/var/lib/postgresql/data:Z
      - benchcoach_local_postgres_data_backups:/backups:z
      - benchcoach_local_postgres_data:/var/lib/postgresql/data
      - benchcoach_local_postgres_data_backups:/backups
    env_file:
      - ./.envs/.local/.postgres

  docs:
    image: benchcoach_local_docs
    container_name: benchcoach_local_docs
    platform: linux/amd64
    build:
      context: .
      dockerfile: ./compose/local/docs/Dockerfile
    env_file:
      - ./.envs/.local/.django
    volumes:
      - ./docs:/docs:z
      - ./config:/app/config:z
      - ./benchcoach:/app/benchcoach:z
    ports:
      - "9000:9000"
    command: /start-docs

  nginx-proxy:
    image: jwilder/nginx-proxy:alpine
    container_name: nginx-proxy

@@ -64,3 +47,5 @@ services:
    restart: always
    depends_on:
      - django
    env_file:
      - ./.envs/.local/.nginx-proxy
nas.yml (66 lines changed)

@@ -1,66 +0,0 @@
version: '3'

volumes:
  benchcoach_local_postgres_data: {}
  benchcoach_local_postgres_data_backups: {}

services:
  django:
    build:
      context: .
      dockerfile: ./compose/local/django/Dockerfile
    image: benchcoach_local_django
    container_name: benchcoach_local_django
    platform: linux/x86_64
    depends_on:
      - postgres
    volumes:
      - /volume1/docker/benchcoach_nas:/app:z
    env_file:
      - ./.envs/.local/.django
      - ./.envs/.local/.postgres
    ports:
      - "8000:8000"
    command: /start

  postgres:
    build:
      context: .
      dockerfile: ./compose/production/postgres/Dockerfile
    image: benchcoach_production_postgres
    container_name: benchcoach_local_postgres
    volumes:
      - benchcoach_local_postgres_data:/var/lib/postgresql/data:Z
      - benchcoach_local_postgres_data_backups:/backups:z
    env_file:
      - ./.envs/.local/.postgres

  docs:
    image: benchcoach_local_docs
    container_name: benchcoach_local_docs
    platform: linux/x86_64
    build:
      context: .
      dockerfile: ./compose/local/docs/Dockerfile
    env_file:
      - ./.envs/.local/.django
    volumes:
      - /volume1/docker/benchcoach_nas/docs:/docs:z
      - /volume1/docker/benchcoach_nas/config:/app/config:z
      - /volume1/docker/benchcoach_nas/benchcoach:/app/benchcoach:z
    ports:
      - "9000:9000"
    command: /start-docs

  nginx-proxy:
    image: jwilder/nginx-proxy:alpine
    container_name: nginx-proxy
    ports:
      - "8001:80"
      - "8002:443"
    volumes:
      - /var/run/docker.sock:/tmp/docker.sock:ro
      - /volume1/docker/benchcoach_nas/certs:/etc/nginx/certs
    restart: always
    depends_on:
      - django
@@ -1,5 +1,5 @@
version: '3'

name: benchcoach
volumes:
  production_postgres_data: {}
  production_postgres_data_backups: {}
@@ -18,7 +18,7 @@ django-redis==5.2.0 # https://github.com/jazzband/django-redis

api-client

-e git+ssh://gituser@home.ascorrea.com/~/pyteamsnap.git#egg=pyteamsnap
-e git+ssh://gituser@home.ascorrea.com/~/gamescrapyr.git#egg=gamescrapyr
-e git+https://gitea.ascorrea.com/asc/pyteamsnap@2022#egg=pyteamsnap
-e git+https://gitea.ascorrea.com/asc/gamescrapyr.git#egg=gamescrapyr

beautifulsoup4==4.11.1
@@ -37,6 +37,14 @@
    postSubmit.addEventListener("click", e => {
        e.preventDefault();
        formData = new FormData(postSubmit.form);
        for (player_lineup_row of postSubmit.form.querySelectorAll("[class=player-lineup-row]")) {
            event_lineup_entry_id_input = player_lineup_row.querySelector('[id$="event_lineup_entry_id"]')
            sequence = player_lineup_row.querySelector('[id$="sequence"]').value
            position = player_lineup_row.dataset['position']
            if (position == '' && sequence == '') {
                event_lineup_entry_id_input.value = ''
            }
        }
        fetch(postSubmit.formAction, {
            method: 'POST',
            body: formData,
@@ -44,10 +52,20 @@
        .then(response => response)
        .then(data => {
            if (data.ok) {
                data.json().then(response_data => {
                    for (event_lineup_entry of response_data){
                        player_lineup_row = postSubmit.form.querySelector(`[data-player-id="${event_lineup_entry['member_id']}"]`)
                        event_lineup_entry_id_input = player_lineup_row.querySelector('[id$="event_lineup_entry_id"]')
                        event_lineup_entry_id_input.value = event_lineup_entry['id']
                        event_lineup_id_input = player_lineup_row.querySelector('[id$="event_lineup_id"]')
                        event_lineup_id_input.value = event_lineup_entry['event_lineup_id']
                    }
                })
                document.querySelector("#popup-messages-content").innerHTML = `<div class="alert alert-dismissible alert-success" role="alert">
                    <strong>Success!</strong>
                    <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
                    </div> `
            }
            else {
                document.querySelector("#popup-messages-content").innerHTML = `<div class="alert alert-dismissible alert-danger" role="alert">
@@ -57,7 +75,10 @@
            }
        })
        .catch((error) => {
            console.error('Error:', error);
            document.querySelector("#popup-messages-content").innerHTML = `<div class="alert alert-dismissible alert-danger" role="alert">
                <strong>Not Success! Some other error! </strong>
                <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
                </div> `
        });
    })
}
@@ -2,7 +2,7 @@
<table class="table table-sm my-0 table-{{ table_id }}" style="min-height: 1rem">
  <tbody class="tbody-{{ table_id }}">
    {% for form in formset %}
    <tr data-player-id="{{ form.member.data.id }}"
    <tr class="player-lineup-row" data-player-id="{{ form.member.data.id }}"
        data-position="{{ form.label.value }}"
        data-order="{{ form.sequence.value }}"
        data-player-name="{{ form.member.data.last_name }}, {{ form.member.data.first_name }}"
@@ -16,14 +16,12 @@
      {{ form.position_only.as_hidden }}
      {{ form.label.as_hidden }}
      {{ form.member_name.as_hidden }}
      {{ form.sequence.as_hidden }}
      <th class="col-1" id="sequence-member-{{ form.member.data.id }}">
        {{ form.sequence.value | add:"1" }}
      </th>
      <td class="col-1">
        <div class="mx-1">
          <span id="player-order-form-{{ form.member.id }}" class="lineup-sequence-form">
            {{ form.sequence.as_hidden }}
          </span>

          <span id="player-order-{{ form.member.id }}" class="lineup-sequence-value">
            {% if form.order.value > 0 %}{{ form.order.value | add:"1" }}{% endif %}
@@ -33,7 +31,6 @@
          {% if form.availability.data.status_code == 2 %}
            <i class="bi bi-question-circle-fill text-info"></i>
          {% elif form.availability.data.status_code == 1 %}
            {# <i class="bi bi-check-circle-fill text-success"></i>#}
            <i class="bi bi-check-circle-fill text-success"></i>
          {% elif form.availability.data.status_code == 0 %}
            <i class="bi bi-x-circle-fill text-danger"></i>
@@ -25,6 +25,7 @@ def edit_lineup(request, event_ids, team_id):
        EventLineupEntry,
        Member,
    )

    from teamsnap.forms import LineupEntryFormset

    client = get_teamsnap_client(request)
@@ -199,11 +200,11 @@ def edit_lineup(request, event_ids, team_id):


def submit_lineup(request, team_id, event_id):
    from pyteamsnap.objects import Event, EventLineup, EventLineupEntry
    from pyteamsnap.objects import EventLineup, EventLineupEntry

    from teamsnap.forms import LineupEntryFormset

    client = get_teamsnap_client(request)
    ts_event = Event.get(client, event_id)
    ts_lineup = EventLineup.search(client, event_id=event_id)
    event_lineup_id = ts_lineup[0].data["id"]
    if request.GET:
@@ -247,7 +248,8 @@ def submit_lineup(request, team_id, event_id):
                pass
            else:
                pass
        return JsonResponse(ts_event.data)
        ts_event_lineup_entries = EventLineupEntry.search(client, event_id=event_id)
        return JsonResponse([lue.data for lue in ts_event_lineup_entries], safe=False)
    return HttpResponseServerError
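The new return sends back a JSON array of lineup-entry payloads rather than a single object, which is why `safe=False` appears: Django's `JsonResponse` refuses non-dict payloads unless that flag is set. A minimal sketch, assuming objects that expose a `.data` dict as the pyteamsnap classes above do:

```python
from django.http import JsonResponse


def lineup_entries_json(ts_event_lineup_entries):
    # safe=False is required because the top-level payload is a list, not a dict.
    return JsonResponse(
        [entry.data for entry in ts_event_lineup_entries],
        safe=False,
    )
```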
@@ -258,7 +260,6 @@ def multi_lineup_choose(request, team_id=None):
        team_id=request.user.teamsnap_preferences.managed_team_id,
    )
    from django.forms import formset_factory

    from pyteamsnap.objects import Event

    from .forms import EventChooseForm