Init Repo
commit 29fa72c456
@@ -0,0 +1,225 @@
|
|||
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,python
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=windows,visualstudiocode,python
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
### Python Patch ###
|
||||
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
||||
poetry.toml
|
||||
|
||||
# ruff
|
||||
.ruff_cache/
|
||||
|
||||
# LSP config files
|
||||
pyrightconfig.json
|
||||
|
||||
### VisualStudioCode ###
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
!.vscode/*.code-snippets
|
||||
|
||||
# Local History for Visual Studio Code
|
||||
.history/
|
||||
|
||||
# Built Visual Studio Code Extensions
|
||||
*.vsix
|
||||
|
||||
### VisualStudioCode Patch ###
|
||||
# Ignore all local history of files
|
||||
.history
|
||||
.ionide
|
||||
|
||||
### Windows ###
|
||||
# Windows thumbnail cache files
|
||||
Thumbs.db
|
||||
Thumbs.db:encryptable
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
|
||||
# Dump file
|
||||
*.stackdump
|
||||
|
||||
# Folder config file
|
||||
[Dd]esktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Windows Installer files
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# Windows shortcuts
|
||||
*.lnk
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,python
|
||||
|
||||
# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
|
||||
|
||||
|
|
@@ -0,0 +1,17 @@
# RESTful API for Stage Hub

## Requirements
* Python 3.7+
* Flask
* Flask-RESTful
* Flask-Restless
* Flask-SQLAlchemy
* Flask-Marshmallow
* virtualenv
* SQLAlchemy

## Usage
```
.\Scripts\activate.bat
python run.py
```
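The routes registered later in this commit expose `/`, `/projects`, and `/accounts`. A minimal smoke-test sketch, assuming the Flask development server is listening on its default `http://127.0.0.1:5000` and that the sample-data script (which creates the `admin` account) has been run — both assumptions about a local setup, not guaranteed by the commit:

```
# Smoke-test sketch only; endpoints come from the blueprints added in this commit.
import json
import urllib.request

BASE = "http://127.0.0.1:5000"  # assumed Flask dev-server default

for path in ("/", "/projects", "/accounts?login=admin"):
    with urllib.request.urlopen(BASE + path) as resp:
        print(path, json.loads(resp.read()))
```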
@@ -0,0 +1,87 @@
|
|||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
|
||||
if [ "${BASH_SOURCE-}" = "$0" ]; then
|
||||
echo "You must source this script: \$ source $0" >&2
|
||||
exit 33
|
||||
fi
|
||||
|
||||
deactivate () {
|
||||
unset -f pydoc >/dev/null 2>&1 || true
|
||||
|
||||
# reset old environment variables
|
||||
# ! [ -z ${VAR+_} ] returns true if VAR is declared at all
|
||||
if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
|
||||
PATH="$_OLD_VIRTUAL_PATH"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
|
||||
PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
|
||||
hash -r 2>/dev/null
|
||||
fi
|
||||
|
||||
if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
|
||||
PS1="$_OLD_VIRTUAL_PS1"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
if [ ! "${1-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV='C:\workspace\StageHub_vue\backend'
|
||||
if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then
|
||||
VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV")
|
||||
fi
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/Scripts:$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
if ! [ -z "${PYTHONHOME+_}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1-}"
|
||||
if [ "x" != x ] ; then
|
||||
PS1="${PS1-}"
|
||||
else
|
||||
PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}"
|
||||
fi
|
||||
export PS1
|
||||
fi
|
||||
|
||||
# Make sure to unalias pydoc if it's already there
|
||||
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true
|
||||
|
||||
pydoc () {
|
||||
python -m pydoc "$@"
|
||||
}
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
|
||||
hash -r 2>/dev/null
|
||||
fi
|
||||
|
|
@@ -0,0 +1,40 @@
|
|||
@echo off
|
||||
|
||||
set "VIRTUAL_ENV=C:\workspace\StageHub_vue\backend"
|
||||
|
||||
if defined _OLD_VIRTUAL_PROMPT (
|
||||
set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
|
||||
) else (
|
||||
if not defined PROMPT (
|
||||
set "PROMPT=$P$G"
|
||||
)
|
||||
if not defined VIRTUAL_ENV_DISABLE_PROMPT (
|
||||
set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
|
||||
)
|
||||
)
|
||||
if not defined VIRTUAL_ENV_DISABLE_PROMPT (
|
||||
set "ENV_PROMPT="
|
||||
if NOT DEFINED ENV_PROMPT (
|
||||
for %%d in ("%VIRTUAL_ENV%") do set "ENV_PROMPT=(%%~nxd) "
|
||||
)
|
||||
)
|
||||
set "PROMPT=%ENV_PROMPT%%PROMPT%"
|
||||
)
|
||||
|
||||
REM Don't use () to avoid problems with them in %PATH%
|
||||
if defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
|
||||
set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
|
||||
:ENDIFVHOME
|
||||
|
||||
set PYTHONHOME=
|
||||
|
||||
REM if defined _OLD_VIRTUAL_PATH (
|
||||
if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH1
|
||||
set "PATH=%_OLD_VIRTUAL_PATH%"
|
||||
:ENDIFVPATH1
|
||||
REM ) else (
|
||||
if defined _OLD_VIRTUAL_PATH goto ENDIFVPATH2
|
||||
set "_OLD_VIRTUAL_PATH=%PATH%"
|
||||
:ENDIFVPATH2
|
||||
|
||||
set "PATH=%VIRTUAL_ENV%\Scripts;%PATH%"
|
||||
|
|
@@ -0,0 +1,100 @@
|
|||
# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
|
||||
# Do not run it directly.
|
||||
|
||||
function _bashify_path -d "Converts a fish path to something bash can recognize"
|
||||
set fishy_path $argv
|
||||
set bashy_path $fishy_path[1]
|
||||
for path_part in $fishy_path[2..-1]
|
||||
set bashy_path "$bashy_path:$path_part"
|
||||
end
|
||||
echo $bashy_path
|
||||
end
|
||||
|
||||
function _fishify_path -d "Converts a bash path to something fish can recognize"
|
||||
echo $argv | tr ':' '\n'
|
||||
end
|
||||
|
||||
function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
|
||||
if test (echo $FISH_VERSION | head -c 1) -lt 3
|
||||
set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
|
||||
else
|
||||
set -gx PATH "$_OLD_VIRTUAL_PATH"
|
||||
end
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
and functions -q _old_fish_prompt
|
||||
# Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
|
||||
set -l fish_function_path
|
||||
|
||||
# Erase virtualenv's `fish_prompt` and restore the original.
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
|
||||
if test "$argv[1]" != 'nondestructive'
|
||||
# Self-destruct!
|
||||
functions -e pydoc
|
||||
functions -e deactivate
|
||||
functions -e _bashify_path
|
||||
functions -e _fishify_path
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV 'C:\workspace\StageHub_vue\backend'
|
||||
|
||||
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
|
||||
if test (echo $FISH_VERSION | head -c 1) -lt 3
|
||||
set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
|
||||
else
|
||||
set -gx _OLD_VIRTUAL_PATH "$PATH"
|
||||
end
|
||||
set -gx PATH "$VIRTUAL_ENV"'/Scripts' $PATH
|
||||
|
||||
# Unset `$PYTHONHOME` if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
function pydoc
|
||||
python -m pydoc $argv
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# Copy the current `fish_prompt` function as `_old_fish_prompt`.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
function fish_prompt
|
||||
# Run the user's prompt first; it might depend on (pipe)status.
|
||||
set -l prompt (_old_fish_prompt)
|
||||
|
||||
# Prompt override provided?
|
||||
# If not, just prepend the environment name.
|
||||
if test -n ''
|
||||
printf '%s%s' '' (set_color normal)
|
||||
else
|
||||
printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV")
|
||||
end
|
||||
|
||||
string join -- \n $prompt # handle multi-line prompts
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
end
|
||||
|
|
@@ -0,0 +1,60 @@
|
|||
$script:THIS_PATH = $myinvocation.mycommand.path
|
||||
$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent
|
||||
|
||||
function global:deactivate([switch] $NonDestructive) {
|
||||
if (Test-Path variable:_OLD_VIRTUAL_PATH) {
|
||||
$env:PATH = $variable:_OLD_VIRTUAL_PATH
|
||||
Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global
|
||||
}
|
||||
|
||||
if (Test-Path function:_old_virtual_prompt) {
|
||||
$function:prompt = $function:_old_virtual_prompt
|
||||
Remove-Item function:\_old_virtual_prompt
|
||||
}
|
||||
|
||||
if ($env:VIRTUAL_ENV) {
|
||||
Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue
|
||||
}
|
||||
|
||||
if (!$NonDestructive) {
|
||||
# Self destruct!
|
||||
Remove-Item function:deactivate
|
||||
Remove-Item function:pydoc
|
||||
}
|
||||
}
|
||||
|
||||
function global:pydoc {
|
||||
python -m pydoc $args
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate -nondestructive
|
||||
|
||||
$VIRTUAL_ENV = $BASE_DIR
|
||||
$env:VIRTUAL_ENV = $VIRTUAL_ENV
|
||||
|
||||
New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH
|
||||
|
||||
$env:PATH = "$env:VIRTUAL_ENV/Scripts;" + $env:PATH
|
||||
if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
function global:_old_virtual_prompt {
|
||||
""
|
||||
}
|
||||
$function:_old_virtual_prompt = $function:prompt
|
||||
|
||||
if ("" -ne "") {
|
||||
function global:prompt {
|
||||
# Add the custom prefix to the existing prompt
|
||||
$previous_prompt_value = & $function:_old_virtual_prompt
|
||||
("" + $previous_prompt_value)
|
||||
}
|
||||
}
|
||||
else {
|
||||
function global:prompt {
|
||||
# Add a prefix to the current prompt, but don't discard it.
|
||||
$previous_prompt_value = & $function:_old_virtual_prompt
|
||||
$new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) "
|
||||
($new_prompt_value + $previous_prompt_value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -0,0 +1,46 @@
|
|||
"""Xonsh activate script for virtualenv"""
|
||||
from xonsh.tools import get_sep as _get_sep
|
||||
|
||||
def _deactivate(args):
|
||||
if "pydoc" in aliases:
|
||||
del aliases["pydoc"]
|
||||
|
||||
if ${...}.get("_OLD_VIRTUAL_PATH", ""):
|
||||
$PATH = $_OLD_VIRTUAL_PATH
|
||||
del $_OLD_VIRTUAL_PATH
|
||||
|
||||
if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""):
|
||||
$PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME
|
||||
del $_OLD_VIRTUAL_PYTHONHOME
|
||||
|
||||
if "VIRTUAL_ENV" in ${...}:
|
||||
del $VIRTUAL_ENV
|
||||
|
||||
if "VIRTUAL_ENV_PROMPT" in ${...}:
|
||||
del $VIRTUAL_ENV_PROMPT
|
||||
|
||||
if "nondestructive" not in args:
|
||||
# Self destruct!
|
||||
del aliases["deactivate"]
|
||||
|
||||
|
||||
# unset irrelevant variables
|
||||
_deactivate(["nondestructive"])
|
||||
aliases["deactivate"] = _deactivate
|
||||
|
||||
$VIRTUAL_ENV = r"C:\workspace\StageHub_vue\backend"
|
||||
|
||||
$_OLD_VIRTUAL_PATH = $PATH
|
||||
$PATH = $PATH[:]
|
||||
$PATH.add($VIRTUAL_ENV + _get_sep() + "Scripts", front=True, replace=True)
|
||||
|
||||
if ${...}.get("PYTHONHOME", ""):
|
||||
# unset PYTHONHOME if set
|
||||
$_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME
|
||||
del $PYTHONHOME
|
||||
|
||||
$VIRTUAL_ENV_PROMPT = ""
|
||||
if not $VIRTUAL_ENV_PROMPT:
|
||||
del $VIRTUAL_ENV_PROMPT
|
||||
|
||||
aliases["pydoc"] = ["python", "-m", "pydoc"]
|
||||
|
|
@@ -0,0 +1,32 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Activate virtualenv for current interpreter:
|
||||
|
||||
Use exec(open(this_file).read(), {'__file__': this_file}).
|
||||
|
||||
This can be used when you must use an existing Python interpreter, not the virtualenv bin/python.
|
||||
"""
|
||||
import os
|
||||
import site
|
||||
import sys
|
||||
|
||||
try:
|
||||
abs_file = os.path.abspath(__file__)
|
||||
except NameError:
|
||||
raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))")
|
||||
|
||||
bin_dir = os.path.dirname(abs_file)
|
||||
base = bin_dir[: -len("Scripts") - 1] # strip away the bin part from the __file__, plus the path separator
|
||||
|
||||
# prepend bin to PATH (this file is inside the bin directory)
|
||||
os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep))
|
||||
os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory
|
||||
|
||||
# add the virtual environments libraries to the host python import mechanism
|
||||
prev_length = len(sys.path)
|
||||
for lib in "..\Lib\site-packages".split(os.pathsep):
|
||||
path = os.path.realpath(os.path.join(bin_dir, lib))
|
||||
site.addsitedir(path.decode("utf-8") if "" else path)
|
||||
sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length]
|
||||
|
||||
sys.real_prefix = sys.prefix
|
||||
sys.prefix = base
|
||||
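As the module docstring above states, this script is meant to be exec'd by a host interpreter rather than imported. A short sketch, assuming the conventional `activate_this.py` filename inside the `Scripts` directory of the virtualenv committed here:

```
# Sketch: activate the virtualenv from an already-running interpreter.
# The path is an assumption based on the VIRTUAL_ENV value used elsewhere in this commit.
activate_this = r"C:\workspace\StageHub_vue\backend\Scripts\activate_this.py"
exec(open(activate_this).read(), {"__file__": activate_this})
```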
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@@ -0,0 +1,19 @@
|
|||
@echo off
|
||||
|
||||
set VIRTUAL_ENV=
|
||||
|
||||
REM Don't use () to avoid problems with them in %PATH%
|
||||
if not defined _OLD_VIRTUAL_PROMPT goto ENDIFVPROMPT
|
||||
set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
|
||||
set _OLD_VIRTUAL_PROMPT=
|
||||
:ENDIFVPROMPT
|
||||
|
||||
if not defined _OLD_VIRTUAL_PYTHONHOME goto ENDIFVHOME
|
||||
set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
|
||||
set _OLD_VIRTUAL_PYTHONHOME=
|
||||
:ENDIFVHOME
|
||||
|
||||
if not defined _OLD_VIRTUAL_PATH goto ENDIFVPATH
|
||||
set "PATH=%_OLD_VIRTUAL_PATH%"
|
||||
set _OLD_VIRTUAL_PATH=
|
||||
:ENDIFVPATH
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@@ -0,0 +1 @@
|
|||
python.exe -m pydoc %*
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@@ -0,0 +1,29 @@

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from flask_restful import Api, Resource, url_for, reqparse, request
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema

app = Flask(__name__)
app.config['SECRET_KEY'] = ''
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

db = SQLAlchemy(app)
ma = Marshmallow(app)

# from StageHubAPI.main.routes import main
# app.register_blueprint(main)

from StageHubAPI.main.routes import main_bp
app.register_blueprint(main_bp)
from StageHubAPI.project.routes import project_bp
app.register_blueprint(project_bp)
from StageHubAPI.account.routes import account_bp
app.register_blueprint(account_bp)

def create_app():
    db.init_app(app)
    ma.init_app(app)
    return app
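The README's `python run.py` entry point is not part of this commit. A minimal sketch of what it could look like, assuming the `create_app()` factory defined above:

```
# run.py (hypothetical; not included in this commit)
from StageHubAPI import create_app

app = create_app()

if __name__ == "__main__":
    # Flask development server; the debug flag and default host/port are assumptions
    app.run(debug=True)
```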
@@ -0,0 +1,80 @@
|
|||
import json
|
||||
import re
|
||||
|
||||
from flask import Blueprint, Response, jsonify, render_template, url_for, request
|
||||
from StageHubAPI import Api, Resource, app, ma, SQLAlchemyAutoSchema, reqparse
|
||||
from StageHubAPI.model import Account
|
||||
from StageHubAPI.response import ApiResponse
|
||||
|
||||
account_bp = Blueprint('account', __name__)
|
||||
api = Api(account_bp)
|
||||
|
||||
|
||||
class Schema(SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model = Account
|
||||
include_relationships = True
|
||||
include_fk = True
|
||||
load_instance = True
|
||||
|
||||
|
||||
account_schema = Schema()
|
||||
accounts_schema = Schema(many=True)
|
||||
|
||||
|
||||
def parse_args():
|
||||
pass
|
||||
|
||||
|
||||
class Accounts(Resource):
|
||||
def get(self):
|
||||
arguments = ['login', 'mail', 'id']
|
||||
for arg in request.args.keys():
|
||||
if arg not in arguments:
|
||||
return ApiResponse(message='Unknown argument : %s' % arg,
|
||||
objects=[],
|
||||
status=200).as_dict()
|
||||
|
||||
login = request.args.get("login")
|
||||
mail = request.args.get("mail")
|
||||
aid = request.args.get("id")
|
||||
|
||||
if login:
|
||||
result = Account.query.filter(Account.login.like(login))
|
||||
return account_schema.dump(result, many=True), 200
|
||||
|
||||
if mail:
|
||||
result = Account.query.filter(Account.mail.like(mail))
|
||||
response = ApiResponse(message='Success',
|
||||
objects=account_schema.dump(result, many=True),
|
||||
status=200)
|
||||
return response.as_dict()
|
||||
|
||||
if aid:
|
||||
result = Account.query.filter(Account.id == aid)
|
||||
response = ApiResponse(message='Success',
|
||||
objects=account_schema.dump(result, many=True),
|
||||
status=200)
|
||||
return response.as_dict()
|
||||
# return account_schema.dump(result, many=True), 200
|
||||
|
||||
return accounts_schema.dump(Account.query.all(), many=True), 200
|
||||
|
||||
def post(self, name):
|
||||
pass
|
||||
|
||||
|
||||
class AccountByName(Resource):
|
||||
def get(self, name):
|
||||
if '@' in name:
|
||||
# Email filter
|
||||
result = Account.query.filter(Account.mail.like(name)).first()
|
||||
elif name.isdigit():
|
||||
result = Account.query.filter(Account.id == name).first()
|
||||
else:
|
||||
result = Account.query.filter(Account.login.like(name)).first()
|
||||
return account_schema.dump(result), 200
|
||||
|
||||
|
||||
api.add_resource(Accounts, '/accounts')
|
||||
api.add_resource(AccountByName, '/accounts/<string:name>')
|
||||
|
|
@@ -0,0 +1,9 @@
from flask import Blueprint, Response, jsonify, render_template, url_for
from StageHubAPI import Api, Resource, app, ma, SQLAlchemyAutoSchema

main_bp = Blueprint('main', __name__)
api = Api(main_bp)

@main_bp.route('/')
def main():
    return jsonify({'name': 'StageHub API', 'status': '200', 'message': 'Success'})
@@ -0,0 +1,443 @@
|
|||
from StageHubAPI import db
|
||||
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
# from dataclasses import dataclass
|
||||
import datetime
|
||||
import hashlib
|
||||
|
||||
|
||||
class Studio(db.Model):
|
||||
__tablename__ = 'studio'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String) # Studio Name
|
||||
code_name = db.Column(db.String(64)) # cgtp, cgxm, bgs
|
||||
description = db.Column(db.Text)
|
||||
image_id = db.Column(db.Integer, default=0)
|
||||
type = db.Column(db.String(32)) # internal, outsource, client
|
||||
meta_data = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
# @dataclass
|
||||
class Project(db.Model):
|
||||
# id: int
|
||||
# name: str
|
||||
# code_name: str
|
||||
# description: str
|
||||
# type: str
|
||||
# vendor: str
|
||||
# image_id: int
|
||||
# enabled: bool
|
||||
|
||||
__tablename__ = 'project'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
code_name = db.Column(db.String(64))
|
||||
description = db.Column(db.Text)
|
||||
type = db.Column(db.String(64), default='tv')
|
||||
vendor_id = db.Column(db.Integer, ForeignKey('vendor.id'), nullable=True)
|
||||
vendor = relationship('Vendor', back_populates='projects')
|
||||
image_id = db.Column(db.Integer, default=0)
|
||||
enabled = db.Column(db.Boolean, default=True)
|
||||
parent_id = db.Column(db.Integer, nullable=True)
|
||||
create_on = db.Column(db.DateTime, default=datetime.datetime.now)
|
||||
update_on = db.Column(db.DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now, nullable=False)
|
||||
private = db.Column(db.Boolean, default=False)
|
||||
central_id = db.Column(db.Integer, ForeignKey('studio.id'), nullable=True)
|
||||
studio_id = db.Column(db.Integer, ForeignKey('studio.id'), nullable=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Vendor(db.Model):
|
||||
__tablename__ = 'vendor'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
code_name = db.Column(db.String(64))
|
||||
description = db.Column(db.Text)
|
||||
projects = relationship('Project')
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class ProjectDrive(db.Model):
|
||||
__tablename__ = 'project_drive'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_id = db.Column(db.Integer, ForeignKey('project.id'))
|
||||
studio_id = db.Column(db.Integer, ForeignKey('studio.id'))
|
||||
platform = db.Column(db.String(10)) # win32, linux, macosx
|
||||
type = db.Column(db.String, nullable=False) # data, image
|
||||
data_server = db.Column(db.String)
|
||||
data_source = db.Column(db.String)
|
||||
filesystem = db.Column(db.String) # samba, nfs
|
||||
mount_point = db.Column(db.String)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}>"
|
||||
|
||||
|
||||
class AssetCategory(db.Model):
|
||||
__tablename__ = 'asset_category'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String(32))
|
||||
parent_id = db.Column(db.Integer)
|
||||
description = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Asset(db.Model):
|
||||
__tablename__ = 'asset'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_id = db.Column(db.Integer)
|
||||
image_id = db.Column(db.Integer, default=0)
|
||||
type = db.Column(db.String(32)) # char, prop, set
|
||||
name = db.Column(db.String(64))
|
||||
parent_id = db.Column(db.Integer)
|
||||
description = db.Column(db.Text)
|
||||
meta_data = db.Column(db.Text)
|
||||
enabled = db.Column(db.Boolean, default=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Episode(db.Model):
|
||||
__tablename__ = 'episode'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String(32))
|
||||
project_id = db.Column(db.Integer, ForeignKey('project.id'))
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Stage(db.Model):
|
||||
__tablename__ = 'stage'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
# modeling, surfacing, rigging, matte_painting, layout, animation, lighting, compositing
|
||||
name = db.Column(db.String(64))
|
||||
# MDL, SFC, RIG, DMP, CFX_HAIR, CFX_XGEN, LAY, SIM, FX, DRS, LGT, COMP
|
||||
code_name = db.Column(db.String(64))
|
||||
parent_id = db.Column(db.Integer, nullable=True) # Parent stage id
|
||||
type = db.Column(db.String(64)) # asset, shot
|
||||
description = db.Column(db.Text)
|
||||
|
||||
|
||||
class Step(db.Model):
|
||||
__tablename__ = 'step'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
# Primary, Secondary, Animation, Blocking
|
||||
name = db.Column(db.String(64))
|
||||
# PRI, SEC, ANI, BLK
|
||||
code_name = db.Column(db.String(64))
|
||||
parent_id = db.Column(db.Integer, nullable=True) # Parent step id
|
||||
description = db.Column(db.Text)
|
||||
|
||||
|
||||
class ProjectStage(db.Model):
|
||||
__tablename__ = 'project_stage'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_id = db.Column(db.Integer, ForeignKey('project.id'))
|
||||
stage_id = db.Column(db.Integer, ForeignKey('stage.id'))
|
||||
steps = relationship('ProjectStep')
|
||||
|
||||
|
||||
class ProjectStep(db.Model):
|
||||
__tablename__ = 'project_step'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_stage_id = db.Column(db.Integer, ForeignKey('project_stage.id'))
|
||||
project_stage = relationship('ProjectStage', back_populates='steps')
|
||||
step_id = db.Column(db.Integer, ForeignKey('step.id'))
|
||||
|
||||
|
||||
class ProjectGroup(db.Model):
|
||||
__tablename__ = 'project_group'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_id = db.Column(db.Integer, ForeignKey('project.id'))
|
||||
group_id = db.Column(db.Integer, ForeignKey('group.id'))
|
||||
|
||||
|
||||
class Status(db.Model):
|
||||
__tablename__ = 'status'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
description = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Shot(db.Model):
|
||||
__tablename__ = 'shot'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
project_id = db.Column(db.Integer, ForeignKey('project.id'))
|
||||
name = db.Column(db.String)
|
||||
type = db.Column(db.String(10)) # main, screen, option
|
||||
status = db.Column(db.String(10)) # pending, wip, approve
|
||||
stage = db.Column(db.String(20))
|
||||
image_id = db.Column(db.Integer, default=0) # thumbnail
|
||||
reuse_id = db.Column(db.Integer, nullable=True) # Reuse shot id
|
||||
parent_id = db.Column(db.Integer)
|
||||
timecode = db.Column(db.String)
|
||||
description = db.Column(db.Text)
|
||||
meta_data = db.Column(db.Text)
|
||||
enabled = db.Column(db.Boolean, default=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class ShotStage(db.Model):
|
||||
__tablename__ = 'shot_stage'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
shot_id = db.Column(db.Integer, ForeignKey('shot.id'))
|
||||
stage_id = db.Column(db.Integer, ForeignKey('stage.id'))
|
||||
|
||||
|
||||
class ShotEpisode(db.Model):
|
||||
__tablename__ = 'shot_episode'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
shot_id = db.Column(db.Integer)
|
||||
episode_id = db.Column(db.Integer)
|
||||
|
||||
|
||||
class ShotCategory(db.Model):
|
||||
__tablename__ = 'shot_category'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
shot_id = db.Column(db.Integer)
|
||||
category_id = db.Column(db.Integer)
|
||||
|
||||
|
||||
class Category(db.Model):
|
||||
__tablename__ = 'category'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
entry_type = db.Column(db.String(64)) # shot, asset
|
||||
type = db.Column(db.String(64)) # act, sequence
|
||||
parent_id = db.Column(db.Integer)
|
||||
description = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class TaskStatus(db.Model):
|
||||
__tablename__ = 'task_status'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
description = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class Task(db.Model):
|
||||
__tablename__ = 'task'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
source_id = db.Column(db.Integer) # source_id
|
||||
type = db.Column(db.String(20)) # asset, shot
|
||||
name = db.Column(db.String(30)) # task name, usually same as stage name
|
||||
stage = db.Column(db.String(30)) # modeling, surfacing, layout, animation, lighting, simulation, fx, compositing
|
||||
step = db.Column(db.String(30), default='', nullable=True) # Ani, Pri, Sec
|
||||
description = db.Column(db.Text)
|
||||
parent_id = db.Column(db.Integer) # parent task id
|
||||
status = db.Column(db.String(64))
|
||||
author_id = db.Column(db.Integer, ForeignKey('account.id'))
|
||||
start_date = db.Column(db.Date)
|
||||
end_date = db.Column(db.Date)
|
||||
assign_to = db.Column(db.Integer, ForeignKey('account.id')) # account id assign to
|
||||
create_on = db.Column(db.DateTime, default=datetime.datetime.now)
|
||||
update_on = db.Column(db.DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
|
||||
private = db.Column(db.Boolean, default=False) # only author and assignee can see it
|
||||
percent = db.Column(db.Integer, default=0) # progress percentage (0-100%)
|
||||
use_check_list = db.Column(db.Boolean, default=False)
|
||||
check_list = relationship('CheckList')
|
||||
attachment = relationship('Attachment')
|
||||
active = db.Column(db.Boolean, default=True)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(type='{self.type}', name='{self.name}')>"
|
||||
|
||||
|
||||
class CheckList(db.Model):
|
||||
__tablename__ = 'check_list'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
checked = db.Column(db.Boolean, default=False)
|
||||
task_id = db.Column(db.Integer, ForeignKey('task.id'))
|
||||
task = relationship("Task", back_populates="check_list")
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class ShotDelivery(db.Model):
|
||||
__tablename__ = 'shot_delivery'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
author_id = db.Column(db.Integer, ForeignKey('account.id'))
|
||||
delivery_to = db.Column(db.Integer, ForeignKey('account.id'))
|
||||
create_on = db.Column(db.DateTime, default=datetime.datetime.now)
|
||||
note_id = db.Column(db.Integer, ForeignKey('note.id'))
|
||||
attachment_id = db.Column(db.Integer, ForeignKey('attachment.id'))
|
||||
content_path = db.Column(db.String(255))
|
||||
description = db.Column(db.Text)
|
||||
meta_data = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}>"
|
||||
|
||||
|
||||
class Account(db.Model):
|
||||
__tablename__ = 'account'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
login = db.Column(db.String(64))
|
||||
image_id = db.Column(db.Integer, ForeignKey('image.id'), nullable=True)
|
||||
first_name = db.Column(db.String(64))
|
||||
last_name = db.Column(db.String(64))
|
||||
type = db.Column(db.String(10)) # inhouse, external, client, api
|
||||
studio_id = db.Column(db.String(64), ForeignKey('studio.id'), nullable=True) # studio name
|
||||
mail = db.Column(db.String(255))
|
||||
password = db.Column(db.String(255))
|
||||
auth_type = db.Column(db.Integer, default=0) # 0: internal, 1: LDAP
|
||||
admin = db.Column(db.Boolean, default=False)
|
||||
create_on = db.Column(db.DateTime, default=datetime.datetime.now)
|
||||
update_on = db.Column(db.DateTime, default=datetime.datetime.now, onupdate=datetime.datetime.now)
|
||||
last_login = db.Column(db.DateTime)
|
||||
use_expire = db.Column(db.Boolean, default=False)
|
||||
expire_on = db.Column(db.DateTime)
|
||||
locked = db.Column(db.Boolean, default=False)
|
||||
activate = db.Column(db.Boolean, default=True)
|
||||
token = db.Column(db.String(255))
|
||||
description = db.Column(db.Text)
|
||||
meta_data = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(login='{self.login}')>"
|
||||
|
||||
|
||||
class Group(db.Model):
|
||||
__tablename__ = 'group'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String(64))
|
||||
type = db.Column(db.String(16)) # department, notify, watch
|
||||
public = db.Column(db.Boolean, default=True)
|
||||
description = db.Column(db.Text)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(name='{self.name}')>"
|
||||
|
||||
|
||||
class AccountGroup(db.Model):
|
||||
__tablename__ = 'account_group'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
account_id = db.Column(db.Integer)
|
||||
group_id = db.Column(db.Integer)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}>"
|
||||
|
||||
|
||||
class Membership(db.Model):
|
||||
__tablename__ = 'membership'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
name = db.Column(db.String)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}>"
|
||||
|
||||
|
||||
class Permission(db.Model):
|
||||
__tablename__ = 'permission'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
membership_id = db.Column(db.Integer)
|
||||
|
||||
view_project = db.Column(db.Boolean)
|
||||
create_project = db.Column(db.Boolean)
|
||||
edit_project = db.Column(db.Boolean)
|
||||
delete_project = db.Column(db.Boolean)
|
||||
|
||||
create_asset = db.Column(db.Boolean)
|
||||
view_asset = db.Column(db.Boolean)
|
||||
edit_asset = db.Column(db.Boolean)
|
||||
publish_asset = db.Column(db.Boolean)
|
||||
delete_asset = db.Column(db.Boolean)
|
||||
|
||||
view_shot = db.Column(db.Boolean)
|
||||
create_shot = db.Column(db.Boolean)
|
||||
edit_shot = db.Column(db.Boolean)
|
||||
publish_shot = db.Column(db.Boolean)
|
||||
delete_shot = db.Column(db.Boolean)
|
||||
|
||||
create_asset_task = db.Column(db.Boolean)
|
||||
view_asset_task = db.Column(db.Boolean)
|
||||
edit_asset_task = db.Column(db.Boolean)
|
||||
delete_asset_task = db.Column(db.Boolean)
|
||||
|
||||
create_shot_task = db.Column(db.Boolean)
|
||||
view_shot_task = db.Column(db.Boolean)
|
||||
edit_shot_task = db.Column(db.Boolean)
|
||||
delete_shot_task = db.Column(db.Boolean)
|
||||
|
||||
create_note = db.Column(db.Boolean)
|
||||
view_note = db.Column(db.Boolean)
|
||||
edit_note = db.Column(db.Boolean)
|
||||
delete_note = db.Column(db.Boolean)
|
||||
|
||||
create_attachment = db.Column(db.Boolean)
|
||||
view_attachment = db.Column(db.Boolean)
|
||||
edit_attachment = db.Column(db.Boolean)
|
||||
delete_attachment = db.Column(db.Boolean)
|
||||
|
||||
notify = db.Column(db.Boolean)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}>"
|
||||
|
||||
|
||||
class Attachment(db.Model):
|
||||
__tablename__ = 'attachment'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
filename = db.Column(db.String(255))
|
||||
file_size = db.Column(db.BigInteger)
|
||||
disk_filename = db.Column(db.String(255))
|
||||
disk_directory = db.Column(db.String(255))
|
||||
content_type = db.Column(db.String(255))
|
||||
digest = db.Column(db.String(64))
|
||||
author_id = db.Column(db.Integer)
|
||||
created_date = db.Column(db.DateTime)
|
||||
description = db.Column(db.Text)
|
||||
task_id = db.Column(db.Integer, ForeignKey('task.id'))
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{type(self).__name__}(filename='{self.filename}')>"
|
||||
|
||||
|
||||
class Image(db.Model):
|
||||
__tablename__ = 'image'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
filename = db.Column(db.String(255))
|
||||
hash = db.Column(db.String(255))
|
||||
file_size = db.Column(db.BigInteger)
|
||||
disk_filename = db.Column(db.String(255))
|
||||
width = db.Column(db.Integer)
|
||||
height = db.Column(db.Integer)
|
||||
created_date = db.Column(db.DateTime)
|
||||
|
||||
|
||||
class Note(db.Model):
|
||||
__tablename__ = 'note'
|
||||
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
|
||||
entry_type = db.Column(db.String) # asset, shot
|
||||
entry_id = db.Column(db.Integer) # asset_id, shot_id
|
||||
attatch_id = db.Column(db.Integer, nullable=True)
|
||||
enabled = db.Column(db.Boolean)
|
||||
rich = db.Column(db.Boolean) # rich format
|
||||
text = db.Column(db.Text)
|
||||
|
|
@@ -0,0 +1,57 @@
|
|||
import json
|
||||
|
||||
from flask import Blueprint, Response, jsonify, render_template, url_for
|
||||
from StageHubAPI import Api, Resource, app, ma, SQLAlchemyAutoSchema
|
||||
from StageHubAPI.model import Project
|
||||
from StageHubAPI.response import ApiResponse
|
||||
|
||||
project_bp = Blueprint('project', __name__)
|
||||
api = Api(project_bp)
|
||||
|
||||
|
||||
class Schema(SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model = Project
|
||||
include_relationships = True
|
||||
include_fk = True
|
||||
load_instance = True
|
||||
|
||||
|
||||
project_schema = Schema()
|
||||
projects_schema = Schema(many=True)
|
||||
|
||||
|
||||
class ProjectList(Resource):
|
||||
def get(self):
|
||||
return projects_schema.dump(Project.query.all()), 200
|
||||
|
||||
|
||||
class ProjectByName(Resource):
|
||||
def get(self, name):
|
||||
p = Project.query.filter(Project.name.like(name)).first()
|
||||
return project_schema.dump(p)
|
||||
|
||||
|
||||
class ProjectById(Resource):
|
||||
def get(self, id):
|
||||
p = Project.query.filter_by(id=id).first()
|
||||
return project_schema.dump(p)
|
||||
|
||||
|
||||
api.add_resource(ProjectList, '/projects')
|
||||
api.add_resource(ProjectByName, '/projects/<string:name>')
|
||||
api.add_resource(ProjectById, '/projects/<int:id>')
|
||||
|
||||
# @project_bp.route('/projects/')
|
||||
# def projects():
|
||||
# projects = Project.query.all()
|
||||
# r = ApiResponse(objects=projects)
|
||||
# r.message = 'success'
|
||||
# return r.as_response()
|
||||
|
||||
# @project_bp.route('/projects/<string:project_name>')
|
||||
# def project(project_name):
|
||||
# p = Project.query.filter_by(name=project_name).first()
|
||||
# r = ApiResponse(objects=[p])
|
||||
# r.message = 'success'
|
||||
# return r.as_response()
|
||||
|
|
@@ -0,0 +1,115 @@
|
|||
import json
|
||||
from flask import Response
|
||||
from StageHubAPI import db
|
||||
from StageHubAPI.model import *
|
||||
from sqlalchemy.orm import class_mapper
|
||||
|
||||
class JsonResponse(object):
|
||||
def __init__(self, message='', objects=[],
|
||||
status=200, mimetype='application/json'):
|
||||
self._message = message
|
||||
self._objects = objects
|
||||
self._mimetype = mimetype
|
||||
self._status = status
|
||||
|
||||
@property
|
||||
def num_result(self):
|
||||
return len(self._objects)
|
||||
|
||||
@property
|
||||
def objects(self):
|
||||
return self._objects
|
||||
|
||||
@objects.setter
|
||||
def objects(self, value):
|
||||
self._objects = value
|
||||
|
||||
@property
|
||||
def message(self):
|
||||
return self._message
|
||||
|
||||
@message.setter
|
||||
def message(self, value):
|
||||
self._message = value
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value):
|
||||
self._status = value
|
||||
|
||||
class ApiResponse(object):
|
||||
def __init__(self, message='', objects=[],
|
||||
status=200, mimetype='application/json'):
|
||||
self._message = message
|
||||
self._objects = [self.object_to_dict(o) for o in objects]
|
||||
self._mimetype = mimetype
|
||||
self._status = status
|
||||
|
||||
def object_to_dict(self, obj, found=None):
|
||||
if isinstance(obj, dict):
|
||||
return obj
|
||||
if found is None:
|
||||
found = set()
|
||||
mapper = class_mapper(obj.__class__)
|
||||
columns = [column.key for column in mapper.columns]
|
||||
|
||||
def get_key_value(c): return (c, getattr(obj, c).isoformat()) \
    if isinstance(getattr(obj, c), (datetime.datetime, datetime.date)) \
    else (c, getattr(obj, c))
|
||||
|
||||
out = dict(map(get_key_value, columns))
|
||||
for name, relation in mapper.relationships.items():
|
||||
if relation not in found:
|
||||
found.add(relation)
|
||||
related_obj = getattr(obj, name)
|
||||
if related_obj is not None:
|
||||
if relation.uselist:
|
||||
out[name] = [self.object_to_dict(
|
||||
child, found) for child in related_obj]
|
||||
else:
|
||||
out[name] = self.object_to_dict(related_obj, found)
|
||||
return out
|
||||
|
||||
@property
|
||||
def num_result(self):
|
||||
return len(self._objects)
|
||||
|
||||
@property
|
||||
def objects(self):
|
||||
return self._objects
|
||||
|
||||
@objects.setter
|
||||
def objects(self, value):
|
||||
self._objects = value
|
||||
|
||||
@property
|
||||
def message(self):
|
||||
return self._message
|
||||
|
||||
@message.setter
|
||||
def message(self, value):
|
||||
self._message = value
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self._status
|
||||
|
||||
@status.setter
|
||||
def status(self, value):
|
||||
self._status = value
|
||||
|
||||
def as_response(self, indent=0):
|
||||
return Response(response=self.as_json(indent=indent),
|
||||
status=self._status,
|
||||
mimetype=self._mimetype)
|
||||
|
||||
def as_dict(self):
|
||||
return dict(num_result=self.num_result,
|
||||
objects=self._objects,
|
||||
message=self._message)
|
||||
|
||||
def as_json(self, indent=0):
|
||||
return json.dumps(self.as_dict(), indent=indent)
|
||||
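For reference, a short sketch of how `ApiResponse` is consumed, mirroring the account routes added earlier in this commit:

```
# Sketch: wrap query results the way the account routes do.
from StageHubAPI.model import Account
from StageHubAPI.response import ApiResponse

accounts = Account.query.all()
response = ApiResponse(message='Success', objects=accounts, status=200)

print(response.num_result)            # number of serialized objects
print(response.as_json(indent=2))     # {"num_result": ..., "objects": [...], "message": "Success"}
flask_response = response.as_response()  # flask.Response with the configured status and mimetype
```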
Binary file not shown.
|
|
@@ -0,0 +1,146 @@
|
|||
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
|
||||
|
||||
/* Greenlet object interface */
|
||||
|
||||
#ifndef Py_GREENLETOBJECT_H
|
||||
#define Py_GREENLETOBJECT_H
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* This is deprecated and undocumented. It does not change. */
|
||||
#define GREENLET_VERSION "1.0.0"
|
||||
|
||||
typedef struct _greenlet {
|
||||
PyObject_HEAD
|
||||
char* stack_start;
|
||||
char* stack_stop;
|
||||
char* stack_copy;
|
||||
intptr_t stack_saved;
|
||||
struct _greenlet* stack_prev;
|
||||
struct _greenlet* parent;
|
||||
PyObject* run_info;
|
||||
struct _frame* top_frame;
|
||||
int recursion_depth;
|
||||
PyObject* weakreflist;
|
||||
#if PY_VERSION_HEX >= 0x030700A3
|
||||
_PyErr_StackItem* exc_info;
|
||||
_PyErr_StackItem exc_state;
|
||||
#else
|
||||
PyObject* exc_type;
|
||||
PyObject* exc_value;
|
||||
PyObject* exc_traceback;
|
||||
#endif
|
||||
PyObject* dict;
|
||||
#if PY_VERSION_HEX >= 0x030700A3
|
||||
PyObject* context;
|
||||
#endif
|
||||
#if PY_VERSION_HEX >= 0x30A00B1
|
||||
CFrame* cframe;
|
||||
#endif
|
||||
} PyGreenlet;
|
||||
|
||||
#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type)
|
||||
#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1)
|
||||
#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL)
|
||||
#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL)
|
||||
#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent)
|
||||
|
||||
/* C API functions */
|
||||
|
||||
/* Total number of symbols that are exported */
|
||||
#define PyGreenlet_API_pointers 8
|
||||
|
||||
#define PyGreenlet_Type_NUM 0
|
||||
#define PyExc_GreenletError_NUM 1
|
||||
#define PyExc_GreenletExit_NUM 2
|
||||
|
||||
#define PyGreenlet_New_NUM 3
|
||||
#define PyGreenlet_GetCurrent_NUM 4
|
||||
#define PyGreenlet_Throw_NUM 5
|
||||
#define PyGreenlet_Switch_NUM 6
|
||||
#define PyGreenlet_SetParent_NUM 7
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
/* This section is used by modules that uses the greenlet C API */
|
||||
static void** _PyGreenlet_API = NULL;
|
||||
|
||||
# define PyGreenlet_Type \
|
||||
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
|
||||
|
||||
# define PyExc_GreenletError \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
|
||||
|
||||
# define PyExc_GreenletExit \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_New(PyObject *args)
|
||||
*
|
||||
* greenlet.greenlet(run, parent=None)
|
||||
*/
|
||||
# define PyGreenlet_New \
|
||||
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
|
||||
_PyGreenlet_API[PyGreenlet_New_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetCurrent(void)
|
||||
*
|
||||
* greenlet.getcurrent()
|
||||
*/
|
||||
# define PyGreenlet_GetCurrent \
|
||||
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Throw(
|
||||
* PyGreenlet *greenlet,
|
||||
* PyObject *typ,
|
||||
* PyObject *val,
|
||||
* PyObject *tb)
|
||||
*
|
||||
* g.throw(...)
|
||||
*/
|
||||
# define PyGreenlet_Throw \
|
||||
(*(PyObject * (*)(PyGreenlet * self, \
|
||||
PyObject * typ, \
|
||||
PyObject * val, \
|
||||
PyObject * tb)) \
|
||||
_PyGreenlet_API[PyGreenlet_Throw_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
|
||||
*
|
||||
* g.switch(*args, **kwargs)
|
||||
*/
|
||||
# define PyGreenlet_Switch \
|
||||
(*(PyObject * \
|
||||
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
|
||||
_PyGreenlet_API[PyGreenlet_Switch_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
|
||||
*
|
||||
* g.parent = new_parent
|
||||
*/
|
||||
# define PyGreenlet_SetParent \
|
||||
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
|
||||
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
|
||||
|
||||
/* Macro that imports greenlet and initializes C API */
|
||||
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
|
||||
keep the older definition to be sure older code that might have a copy of
|
||||
the header still works. */
|
||||
# define PyGreenlet_Import() \
|
||||
{ \
|
||||
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
|
||||
}
|
||||
|
||||
#endif /* GREENLET_MODULE */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif /* !Py_GREENLETOBJECT_H */
|
||||
|
|
@@ -0,0 +1,206 @@
|
|||
from StageHubAPI import db
|
||||
from StageHubAPI.model import Project, Group, Status, Stage, Step, Episode, Shot, Vendor, Studio, Task, TaskStatus, Account
|
||||
from StageHubAPI.model import ProjectGroup, ProjectStage, ProjectStep
|
||||
import sys
|
||||
import random
|
||||
import hashlib
|
||||
|
||||
db.drop_all()
|
||||
db.create_all()
|
||||
|
||||
s1 = Studio(name='CGCG', code_name='cgtp')
|
||||
|
||||
v1 = Vendor(name='Lucas Animation', code_name='LAL')
|
||||
v2 = Vendor(name='Dream Work Animation', code_name='DWA')
|
||||
|
||||
db.session.add(s1)
|
||||
db.session.add(v1)
|
||||
db.session.add(v2)
|
||||
db.session.flush()
|
||||
|
||||
print(v1.id)
|
||||
|
||||
# db.session.flush()
|
||||
p1 = Project(name='Cookies', code_name='cookies',
|
||||
vendor_id=v1.id, description='Star Wars Season II', studio_id=s1.id)
|
||||
p2 = Project(name='JUR', code_name='jur', vendor_id=v2.id,
|
||||
description='Jurassic Park Season II', studio_id=s1.id)
|
||||
p3 = Project(name='NDR', code_name='ndr', vendor_id=v2.id,
|
||||
description='How to train your dragon Season IV', studio_id=s1.id)
|
||||
|
||||
db.session.add(p1)
|
||||
db.session.add(p2)
|
||||
db.session.add(p3)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
projects = ['Cookies', 'JUR', 'NDR']
|
||||
|
||||
eps = {'Cookies': ['EP01', 'EP02', 'EP03'],
|
||||
'JUR': ['JUR_001', 'JUR_002', 'JUR_003'],
|
||||
'NDR': ['NDR_101', 'NDR_102', 'NDR_103']}
|
||||
|
||||
act = ['']
|
||||
|
||||
for p in projects:
|
||||
p_id = projects.index(p) + 1
|
||||
for e in eps.get(p):
|
||||
episode = Episode(name=e, project_id=p_id)
|
||||
db.session.add(episode)
|
||||
|
||||
db.session.flush()
|
||||
|
||||
status = []
|
||||
st1 = Status(name='ready', description='Ready to Start')
|
||||
st2 = Status(name='wip', description='Work in Progress')
|
||||
st3 = Status(name='finished', description='Finished')
|
||||
st4 = Status(name='cbb', description='Could be Better')
|
||||
|
||||
status.append(st1)
|
||||
status.append(st2)
|
||||
status.append(st3)
|
||||
status.append(st4)
|
||||
for s in status:
|
||||
db.session.add(s)
|
||||
|
||||
db.session.flush()
|
||||
|
||||
task_status = []
|
||||
rds = TaskStatus(name='ready', description='Ready to Start')
|
||||
wip = TaskStatus(name='wip', description='Work in Progress')
|
||||
pnd = TaskStatus(name='pending', description='Pending')
|
||||
rtk = TaskStatus(name='retake', description='Retake')
|
||||
apr = TaskStatus(name='approve', description='Approve')
|
||||
cbb = TaskStatus(name='cbb', description='Could be Better')
|
||||
|
||||
task_status.append(rds)
|
||||
task_status.append(wip)
|
||||
task_status.append(pnd)
|
||||
task_status.append(rtk)
|
||||
task_status.append(apr)
|
||||
task_status.append(cbb)
|
||||
|
||||
for ts in task_status:
|
||||
db.session.add(ts)
|
||||
|
||||
# Stages
|
||||
stages = []
|
||||
mdl = Stage(name='modeling', code_name='MDL', type='assets')
|
||||
stages.append(mdl)
|
||||
sfc = Stage(name='surfacing', code_name='SFC', type='assets')
|
||||
stages.append(sfc)
|
||||
rig = Stage(name='rigging', code_name='RIG', type='assets')
|
||||
stages.append(rig)
|
||||
dmp = Stage(name='matte_painting', code_name='DMP', type='assets')
|
||||
stages.append(dmp)
|
||||
hair = Stage(name='hair', code_name='HAIR', type='assets')
|
||||
stages.append(hair)
|
||||
xgen = Stage(name='xgen', code_name='XGEN', type='assets')
|
||||
stages.append(xgen)
|
||||
ani = Stage(name='animation', code_name='ANI', type='shots')
|
||||
stages.append(ani)
|
||||
lay = Stage(name='layout', code_name='LAY', type='shots')
|
||||
stages.append(lay)
|
||||
sim = Stage(name='simulation', code_name='SIM', type='shots')
|
||||
stages.append(sim)
|
||||
fx = Stage(name='fx', code_name='FX', type='shots')
|
||||
stages.append(fx)
|
||||
drs = Stage(name='dressing', code_name='DRS', type='shots')
|
||||
stages.append(drs)
|
||||
lgt = Stage(name='lighting', code_name='LGT', type='shots')
|
||||
stages.append(lgt)
|
||||
cmp = Stage(name='compositing', code_name='COMP', type='shots')
|
||||
stages.append(cmp)
|
||||
|
||||
for stg in stages:
|
||||
db.session.add(stg)
|
||||
db.session.flush()
|
||||
|
||||
stp_blk = Step(name='Blocking', code_name='BLK')
|
||||
db.session.add(stp_blk)
|
||||
stp_pri = Step(name='Primary', code_name='PRI')
|
||||
db.session.add(stp_pri)
|
||||
stp_sec = Step(name='Secondary', code_name='SEC')
|
||||
db.session.add(stp_sec)
|
||||
stp_ani = Step(name='Animation', code_name='ANI')
|
||||
db.session.add(stp_ani)
|
||||
|
||||
|
||||
# Project Stage
|
||||
p1_ani = ProjectStage(project_id=p1.id, stage_id=ani.id)
|
||||
db.session.add(p1_ani)
|
||||
p2_ani = ProjectStage(project_id=p2.id, stage_id=ani.id)
|
||||
db.session.add(p2_ani)
|
||||
|
||||
db.session.flush()
|
||||
|
||||
# Steps
|
||||
p1_ani_blk = ProjectStep(project_stage_id=p1_ani.id, step_id=stp_blk.id)
|
||||
db.session.add(p1_ani_blk)
|
||||
p1_ani_ani = ProjectStep(project_stage_id=p1_ani.id, step_id=stp_ani.id)
|
||||
db.session.add(p1_ani_ani)
|
||||
|
||||
p2_ani_blk = ProjectStep(project_stage_id=p2_ani.id, step_id=stp_blk.id)
|
||||
db.session.add(p2_ani_blk)
|
||||
p2_ani_pri = ProjectStep(project_stage_id=p2_ani.id, step_id=stp_pri.id)
|
||||
db.session.add(p2_ani_pri)
|
||||
p2_ani_sec = ProjectStep(project_stage_id=p2_ani.id, step_id=stp_sec.id)
|
||||
db.session.add(p2_ani_sec)
|
||||
p2_ani_ani = ProjectStep(project_stage_id=p2_ani.id, step_id=stp_ani.id)
|
||||
db.session.add(p2_ani_ani)
|
||||
|
||||
db.session.flush()
|
||||
|
||||
a1 = Account(login='indigo', first_name='唐', last_name='慕霖', mail='indigo@cgcg.com.tw', type='inhouse', studio_id=s1.id,
|
||||
password=hashlib.md5('test@1234'.encode('utf8')).hexdigest())
|
||||
a2 = Account(login='admin', first_name='admin', last_name='', mail='admin@cgcg.com.tw', type='inhouse', studio_id=s1.id,
|
||||
password=hashlib.md5('admin@1234'.encode('utf8')).hexdigest(), admin=True)
|
||||
|
||||
db.session.add(a1)
|
||||
db.session.add(a2)
|
||||
|
||||
# Stages
|
||||
groups = []
|
||||
mdl = Group(name='modeling', type='department')
|
||||
groups.append(mdl)
|
||||
sfc = Group(name='surfacing', type='department')
|
||||
groups.append(sfc)
|
||||
rig = Group(name='rigging', type='department')
|
||||
groups.append(rig)
|
||||
dmp = Group(name='matte_painting', type='department')
|
||||
groups.append(dmp)
|
||||
ani = Group(name='animation', type='department')
|
||||
groups.append(ani)
|
||||
lay = Group(name='layout', type='department')
|
||||
groups.append(lay)
|
||||
sim = Group(name='simulation', type='department')
|
||||
groups.append(sim)
|
||||
fx = Group(name='fx', type='department')
|
||||
groups.append(fx)
|
||||
lgt = Group(name='lighting', type='department')
|
||||
groups.append(lgt)
|
||||
cmp = Group(name='compositing', type='department')
|
||||
groups.append(cmp)
|
||||
|
||||
for grp in groups:
|
||||
db.session.add(grp)
|
||||
db.session.flush()
|
||||
|
||||
for p in projects:
|
||||
p_id = projects.index(p) + 1
|
||||
for e in eps.get(p):
|
||||
for i in range(100):
|
||||
shot_name = '{}_{}_{:03d}'.format(p, e, i)
|
||||
shot = Shot(name=shot_name, project_id=p_id, type='main', status=st1.name)
|
||||
db.session.add(shot)
|
||||
db.session.flush()
|
||||
for stg in stages:
|
||||
ts_id = random.randint(0, len(task_status)-1)
|
||||
# print(ts_id, len(task_status))
|
||||
task = Task(source_id=shot.id, name=stg.name, stage=stg.name, type='shot',
|
||||
status=task_status[ts_id].name, author_id=a1.id)
|
||||
db.session.add(task)
|
||||
for grp in groups:
|
||||
db.session.add(ProjectGroup(project_id=p_id, group_id=grp.id))
|
||||
|
||||
db.session.commit()
|
||||
|
|
@@ -0,0 +1,8 @@
home = C:\Python37
implementation = CPython
version_info = 3.7.9.final.0
virtualenv = 20.4.2
include-system-site-packages = false
base-prefix = C:\Python37
base-exec-prefix = C:\Python37
base-executable = C:\Python37\python.exe