Compare commits
2 Commits
v2.57.0dev
...
v1.37.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
872a09aa53 | ||
|
|
f7fa31e8c0 |
356
.appveyor.yml
356
.appveyor.yml
@@ -1,186 +1,186 @@
|
||||
image:
|
||||
- Ubuntu2204
|
||||
- Visual Studio 2022
|
||||
- macos-catalina
|
||||
for:
|
||||
-
|
||||
matrix:
|
||||
only:
|
||||
- image: Ubuntu2204
|
||||
environment:
|
||||
APPVEYOR_SSH_KEY: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDhb96UEXy8yOy/f+riX/8kKbNx/lOfIZ4pP4Cw3Gj3DmnTwEnxtRtyc+xtaxOsKbt+7+EAXFpCzYX+jHMhtd0QtWB7dbey8DBg31g0f8C5EPquqROibVbhzr/F3f6/d52FFfq6Y/CWaAvLjezvipr+zOOsIFcVusqtXdPJQ/LtUJ0LS5d4lFiw5ELHSxHIpqwGwyb7PbR3ufEFoqbr8eYiCH+vlBob72ArPfo2f3u0sMvpGYmjVVu2jj4FEY2h89sLrGyFdNWBoyumRhkb38+WSAuyPa/Y21+g+S8sRzIlkwbxicGNMtrMIi6zHEIGAgA06Sw2psP807h730PPOVaWjUcU3ojNW8hH3nPizF74pT82+iP7/fFC4PXLP+tBa+8OoHC5yiO7QKUKprMSqVa1qOm8fHbrzglplKJXfzSfUtSE+AQ+HtHhuUWKI+0LBLDrsOJwI5hbsPOAuiZ5I3VfqfAOck6SH9TcmlapVmQEypc7d7oeeUtZSOuIWKXp068= dfx@aw"
|
||||
APPIMAGE_TOOL: appimage-builder-x86_64.AppImage
|
||||
DEPLOY_DIR: AppDir/opt/pyfa
|
||||
# APPVEYOR_SSH_BLOCK: true
|
||||
cache:
|
||||
- /home/appveyor/.cache/pip -> requirements.txt
|
||||
# init:
|
||||
# - sh: curl -sflL 'https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-ssh.sh' | bash -e -
|
||||
install:
|
||||
- sh: sudo DEBIAN_FRONTEND=noninteractive apt-get -y update
|
||||
# AppImage dependencies
|
||||
- sh: sudo DEBIAN_FRONTEND=noninteractive apt-get -y install libfuse2
|
||||
# Preparation script dependencies
|
||||
- sh: sudo DEBIAN_FRONTEND=noninteractive apt-get -y install python3-wxgtk4.0 python3-sqlalchemy python3-logbook
|
||||
before_build:
|
||||
# Prepare pyfa data
|
||||
- sh: find locale/ -type f -name "*.po" -exec msgen "{}" -o "{}" \;
|
||||
- sh: python3 -B scripts/compile_lang.py
|
||||
- sh: python3 -B scripts/dump_crowdin_progress.py
|
||||
- sh: python3 -B db_update.py
|
||||
- sh: export PYFA_VERSION="$(python3 -B scripts/dump_version.py)"
|
||||
- sh: mkdir build
|
||||
# Download packaging tool
|
||||
- sh: curl -o $APPIMAGE_TOOL -L https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
|
||||
- sh: chmod +x $APPIMAGE_TOOL
|
||||
build_script:
|
||||
- sh: mkdir -p AppDir/opt/pyfa
|
||||
- sh: cp -r eos graphs gui imgs locale service utils eve.db config.py pyfa.py db_update.py README.md LICENSE version.yml AppDir/opt/pyfa/
|
||||
- sh: mkdir -p AppDir/usr/share/icons/hicolor/64x64/apps/
|
||||
- sh: cp imgs/gui/pyfa64.png AppDir/usr/share/icons/hicolor/64x64/apps/pyfa.png
|
||||
- sh: ./$APPIMAGE_TOOL --recipe dist_assets/linux/AppImageBuilder.yml
|
||||
after_build:
|
||||
- sh: ls -la
|
||||
artifacts:
|
||||
- path: pyfa-$PYFA_VERSION-linux.AppImage
|
||||
deploy:
|
||||
tag: $PYFA_VERSION
|
||||
release: pyfa $PYFA_VERSION
|
||||
description: 'Release description'
|
||||
provider: GitHub
|
||||
auth_token:
|
||||
secure: M94o0xMtzxrvlKpqMcXU2KfbJdd3aYJ3UxWzePUz/pkT1/Ojiis052CiLsLVyzJg
|
||||
draft: true
|
||||
force_update: false
|
||||
# deploy on tag push only
|
||||
on:
|
||||
APPVEYOR_REPO_TAG: true
|
||||
-
|
||||
matrix:
|
||||
only:
|
||||
- image: Visual Studio 2022
|
||||
environment:
|
||||
PYTHON: "C:\\Python311-x64"
|
||||
# Should be enabled only for build process debugging
|
||||
# init:
|
||||
# - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
|
||||
cache:
|
||||
- C:\users\appveyor\appdata\local\pip\cache\ -> requirements.txt
|
||||
install:
|
||||
- ps: echo("OS version:")
|
||||
- ps: "[System.Environment]::OSVersion.Version"
|
||||
environment:
|
||||
|
||||
- ps: echo("Filesystem - root:")
|
||||
- ps: "ls \"C:\\\""
|
||||
global:
|
||||
# SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
|
||||
# /E:ON and /V:ON options are not enabled in the batch script intepreter
|
||||
# See: http://stackoverflow.com/a/13751649/163740
|
||||
CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"
|
||||
|
||||
- ps: echo("Filesystem - projects root:")
|
||||
- ps: "ls \"C:\\projects\\\""
|
||||
matrix:
|
||||
|
||||
- ps: echo("Filesystem - pyfa root:")
|
||||
- ps: "ls \"C:\\projects\\$env:APPVEYOR_PROJECT_SLUG\\\""
|
||||
# Python 2.7.10 is the latest version and is not pre-installed.
|
||||
|
||||
- ps: echo("Filesystem - installed SDKs:")
|
||||
- ps: "ls \"C:\\Program Files (x86)\\Windows Kits\\\""
|
||||
- PYTHON: "C:\\Python27.10"
|
||||
PYTHON_VERSION: "2.7.10"
|
||||
PYTHON_ARCH: "32"
|
||||
|
||||
# Prepend newly installed Python to the PATH of this build (this cannot be
|
||||
# done from inside the powershell script as it would require to restart
|
||||
# the parent CMD process).
|
||||
- cmd: "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
|
||||
- cmd: "appveyor DownloadFile https://github.com/mlocati/gettext-iconv-windows/releases/download/v0.20.2-v1.16/gettext0.20.2-iconv1.16-shared-64.zip"
|
||||
- cmd: "7z x gettext0.20.2-iconv1.16-shared-64.zip -ogettext"
|
||||
- cmd: "SET PATH=gettext;%PATH%"
|
||||
#- PYTHON: "C:\\Python27.10-x64"
|
||||
# PYTHON_VERSION: "2.7.10"
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
- cmd: "python --version"
|
||||
- cmd: "python -c \"import struct; print(struct.calcsize('P') * 8)\""
|
||||
# Pre-installed Python versions, which Appveyor may upgrade to
|
||||
# a later point release.
|
||||
# See: http://www.appveyor.com/docs/installed-software#python
|
||||
|
||||
# Upgrade to the latest version of pip to avoid it displaying warnings
|
||||
# about it being out of date.
|
||||
- cmd: "python -m pip install --upgrade pip"
|
||||
#- PYTHON: "C:\\Python27"
|
||||
# PYTHON_VERSION: "2.7.x" # currently 2.7.9
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
# Install the build dependencies of the project. If some dependencies contain
|
||||
# compiled extensions and are not provided as pre-built wheel packages,
|
||||
# pip will build them from source using the MSVC compiler matching the
|
||||
# target Python version and architecture
|
||||
- ps: echo("Install pip requirements:")
|
||||
- cmd: "python -m pip install -r requirements.txt"
|
||||
- cmd: "python -m pip install PyInstaller"
|
||||
before_build:
|
||||
# directory that will contain the built files
|
||||
- ps: $env:PYFA_DIST_DIR = "c:\projects\$env:APPVEYOR_PROJECT_SLUG\dist"
|
||||
- ps: $env:PYFA_VERSION = (python ./scripts/dump_version.py)
|
||||
- ps: echo("pyfa version $env:PYFA_VERSION")
|
||||
build_script:
|
||||
- ps: echo("Build pyfa:")
|
||||
- ps: Get-ChildItem locale/*.po -Recurse -File| Foreach {msgen $_.fullname -o $_.fullname}
|
||||
# Build language files
|
||||
- cmd: "python scripts/compile_lang.py"
|
||||
# Dump language progress
|
||||
- cmd: "python scripts/dump_crowdin_progress.py"
|
||||
# Build gamedata DB
|
||||
- cmd: "python db_update.py"
|
||||
# Build command for PyInstaller
|
||||
- cmd: "python -m PyInstaller --clean -y pyfa.spec"
|
||||
# Copy over manifest (See pyfa-org/pyfa#1622)
|
||||
- ps: xcopy /y dist_assets\win\pyfa.exe.manifest $env:PYFA_DIST_DIR\pyfa\
|
||||
# InnoScript EXE building. This is in a separate script because I don't feel like copying over the logic to AppVeyor script right now...
|
||||
- cmd: "python dist_assets/win/dist.py"
|
||||
- ps: dir $env:PYFA_DIST_DIR/
|
||||
after_build:
|
||||
- ps: "ls \"./\""
|
||||
- ps: 7z a "pyfa-$env:PYFA_VERSION-win.zip" -r "$env:PYFA_DIST_DIR\pyfa\*"
|
||||
artifacts:
|
||||
- path: pyfa*-win.zip
|
||||
- path: pyfa*-win.exe
|
||||
deploy:
|
||||
tag: $(pyfa_version)
|
||||
release: pyfa $(pyfa_version)
|
||||
description: 'Release description'
|
||||
provider: GitHub
|
||||
auth_token:
|
||||
secure: M94o0xMtzxrvlKpqMcXU2KfbJdd3aYJ3UxWzePUz/pkT1/Ojiis052CiLsLVyzJg
|
||||
draft: true
|
||||
force_update: false
|
||||
# deploy on tag push only
|
||||
on:
|
||||
APPVEYOR_REPO_TAG: true
|
||||
-
|
||||
matrix:
|
||||
only:
|
||||
- image: macos-catalina
|
||||
environment:
|
||||
APPVEYOR_SSH_KEY: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDJDW/+oYNGOiPvwuwAL9tc/LQgg58aosIVpMYfepQZ20V+VZnHpZh8IRDA8Jo5xht19p2PksA+hFgqA0kpKtrSkuiWdE8rATQItfk4gf7yB0yGasJGGQZYazy9k/9XtmYkq2HHOOeEqdxvrICddJQ88MLCLT9lJENSUP/YS/yGcjZFXVxE11pTeIcqlCRU+3eYa1v7BeNvXIKNhZoK5orXWrtuH3cy8jrSns/u70aYfJ6B2jA8CnWnDbuvpeQtEY61SQqlKUsSArNa8NAsXj41wr3Ar9gAG9330w7EMTqlutk8HZO35uHI0q5qinUhaQYufPPrVkb2L/N+ZCfu0fnh appveyor"
|
||||
cache:
|
||||
- /Users/appveyor/Library/Caches/pip/ -> requirements.txt
|
||||
init:
|
||||
# - sh: curl -sflL 'https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-ssh.sh' | bash -e -
|
||||
- sh: source ~/venv3.11/bin/activate
|
||||
install:
|
||||
- sh: bash scripts/osx-setup.sh
|
||||
build_script:
|
||||
- sh: bash scripts/osx-translations.sh
|
||||
- sh: python3 scripts/compile_lang.py
|
||||
- sh: python3 scripts/dump_crowdin_progress.py
|
||||
- sh: python3 db_update.py
|
||||
after_build:
|
||||
- sh: export PYFA_VERSION="$(python3 scripts/dump_version.py)"
|
||||
- sh: bash scripts/osx-package.sh
|
||||
# on_finish:
|
||||
# - sh: export APPVEYOR_SSH_BLOCK=true
|
||||
# - sh: curl -sflL 'https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-ssh.sh' | bash -e -
|
||||
artifacts:
|
||||
- path: dist/pyfa*-mac.zip
|
||||
before_deploy:
|
||||
- sh: export RELEASE_PKG_FILE=$(ls *.deb)
|
||||
- sh: echo "deploying $RELEASE_PKG_FILE to GitHub releases"
|
||||
deploy:
|
||||
tag: $PYFA_VERSION
|
||||
release: pyfa $PYFA_VERSION
|
||||
description: 'Release description'
|
||||
provider: GitHub
|
||||
auth_token:
|
||||
secure: M94o0xMtzxrvlKpqMcXU2KfbJdd3aYJ3UxWzePUz/pkT1/Ojiis052CiLsLVyzJg
|
||||
draft: true
|
||||
force_update: false
|
||||
# deploy on tag push only
|
||||
on:
|
||||
APPVEYOR_REPO_TAG: true
|
||||
#- PYTHON: "C:\\Python27-x64"
|
||||
# PYTHON_VERSION: "2.7.x" # currently 2.7.9
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
#- PYTHON: "C:\\Python33"
|
||||
# PYTHON_VERSION: "3.3.x" # currently 3.3.5
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
#- PYTHON: "C:\\Python33-x64"
|
||||
# PYTHON_VERSION: "3.3.x" # currently 3.3.5
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
#- PYTHON: "C:\\Python34"
|
||||
# PYTHON_VERSION: "3.4.x" # currently 3.4.3
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
#- PYTHON: "C:\\Python34-x64"
|
||||
# PYTHON_VERSION: "3.4.x" # currently 3.4.3
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
# Python versions not pre-installed
|
||||
|
||||
# Python 2.6.6 is the latest Python 2.6 with a Windows installer
|
||||
# See: https://github.com/ogrisel/python-appveyor-demo/issues/10
|
||||
|
||||
#- PYTHON: "C:\\Python266"
|
||||
# PYTHON_VERSION: "2.6.6"
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
#- PYTHON: "C:\\Python266-x64"
|
||||
# PYTHON_VERSION: "2.6.6"
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
#- PYTHON: "C:\\Python35"
|
||||
# PYTHON_VERSION: "3.5.0"
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
#- PYTHON: "C:\\Python35-x64"
|
||||
# PYTHON_VERSION: "3.5.0"
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
# Major and minor releases (i.e x.0.0 and x.y.0) prior to 3.3.0 use
|
||||
# a different naming scheme.
|
||||
|
||||
#- PYTHON: "C:\\Python270"
|
||||
# PYTHON_VERSION: "2.7.0"
|
||||
# PYTHON_ARCH: "32"
|
||||
|
||||
#- PYTHON: "C:\\Python270-x64"
|
||||
# PYTHON_VERSION: "2.7.0"
|
||||
# PYTHON_ARCH: "64"
|
||||
|
||||
install:
|
||||
# If there is a newer build queued for the same PR, cancel this one.
|
||||
# The AppVeyor 'rollout builds' option is supposed to serve the same
|
||||
# purpose but it is problematic because it tends to cancel builds pushed
|
||||
# directly to master instead of just PR builds (or the converse).
|
||||
# credits: JuliaLang developers.
|
||||
- ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
|
||||
https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
|
||||
Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
|
||||
throw "There are newer queued builds for this pull request, failing early." }
|
||||
|
||||
# Install wxPython
|
||||
- 'ECHO Downloading wxPython.'
|
||||
- "appveyor DownloadFile https://goo.gl/yvO8PB -FileName C:\\wxpython.exe"
|
||||
#- "appveyor DownloadFile https://goo.gl/Uj0jV3 -FileName C:\\wxpython64.exe"
|
||||
|
||||
- 'ECHO Install wxPython'
|
||||
- "C:\\wxpython.exe /SP- /VERYSILENT /NORESTART"
|
||||
#- "C:\\wxpython64.exe /SP- /VERYSILENT /NORESTART"
|
||||
|
||||
- ECHO "Filesystem root:"
|
||||
- ps: "ls \"C:/\""
|
||||
|
||||
- ECHO "Filesystem pyfa root:"
|
||||
- ps: "ls \"C:\\projects\\pyfa\\\""
|
||||
|
||||
- ECHO "Installed SDKs:"
|
||||
- ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""
|
||||
|
||||
# Install Python (from the official .msi of http://python.org) and pip when
|
||||
# not already installed.
|
||||
# - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 }
|
||||
|
||||
# Prepend newly installed Python to the PATH of this build (this cannot be
|
||||
# done from inside the powershell script as it would require to restart
|
||||
# the parent CMD process).
|
||||
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
|
||||
|
||||
# Check that we have the expected version and architecture for Python
|
||||
- "python --version"
|
||||
- "python -c \"import struct; print(struct.calcsize('P') * 8)\""
|
||||
|
||||
# Upgrade to the latest version of pip to avoid it displaying warnings
|
||||
# about it being out of date.
|
||||
- "pip install --disable-pip-version-check --user --upgrade pip"
|
||||
|
||||
# Install the build dependencies of the project. If some dependencies contain
|
||||
# compiled extensions and are not provided as pre-built wheel packages,
|
||||
# pip will build them from source using the MSVC compiler matching the
|
||||
# target Python version and architecture
|
||||
# C:\\projects\\eve-gnosis\\
|
||||
- ECHO "Install pip requirements:"
|
||||
- "pip install -r requirements.txt"
|
||||
- "pip install -r requirements_test.txt"
|
||||
- "pip install -r requirements_build_windows.txt"
|
||||
|
||||
build_script:
|
||||
# Build the compiled extension
|
||||
# - "python setup.py build"
|
||||
- ECHO "Build pyfa:"
|
||||
#- copy C:\projects\pyfa\dist_assets\win\pyfa.spec C:\projects\pyfa\pyfa.spec
|
||||
- "python C:\\projects\\pyfa\\setup.py build"
|
||||
|
||||
#- ECHO "Build pyfa (Debug):"
|
||||
#- copy C:\projects\pyfa\dist_assets\win\pyfa_debug.spec C:\projects\pyfa\pyfa_debug.spec
|
||||
#- "pyinstaller.exe --clean --noconfirm --windowed --upx-dir=C:\\projects\\pyfa\\scripts\\upx.exe C:\\projects\\pyfa\\pyfa_debug.spec"
|
||||
|
||||
build: on
|
||||
|
||||
after_build:
|
||||
- ps: "ls \"./\""
|
||||
#- ps: "ls \"C:\\projects\\pyfa\\build\\pyfa\\\""
|
||||
- ps: "ls \"C:\\projects\\pyfa\\build\\\""
|
||||
- ps: "ls \"C:\\projects\\pyfa\\build\\exe.win32-2.7\\\""
|
||||
# Zip
|
||||
# APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER
|
||||
#- 7z a build.zip -r C:\projects\pyfa\build\pyfa\*.*
|
||||
- 7z a pyfa.zip -r C:\projects\pyfa\build\exe.win32-2.7\*.*
|
||||
#- 7z a pyfa_debug.zip -r C:\projects\pyfa\dist\pyfa_debug\*.*
|
||||
|
||||
on_success:
|
||||
# Do nothing right now
|
||||
|
||||
test_script:
|
||||
#- tox
|
||||
#- "py.test --cov=./"
|
||||
# Run the project tests
|
||||
# - "%CMD_IN_ENV% python C:/projects/eve-gnosis/setup.py nosetests"
|
||||
|
||||
after_test:
|
||||
# If tests are successful, create binary packages for the project.
|
||||
# - "%CMD_IN_ENV% python setup.py bdist_wheel"
|
||||
# - "%CMD_IN_ENV% python setup.py bdist_wininst"
|
||||
# - "%CMD_IN_ENV% python setup.py bdist_msi"
|
||||
# - ps: "ls dist"
|
||||
|
||||
artifacts:
|
||||
# Archive the generated packages in the ci.appveyor.com build report.
|
||||
- path: pyfa.zip
|
||||
name: 'pyfa.zip'
|
||||
#- path: pyfa_debug.zip
|
||||
# name: Pyfa_debug
|
||||
|
||||
#on_success:
|
||||
# - TODO: upload the content of dist/*.whl to a public wheelhouse
|
||||
#
|
||||
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -1,9 +1,11 @@
|
||||
# Set the default behavior, in case people don't have core.autocrlf set.
|
||||
* text=auto
|
||||
|
||||
# Explicitly declare text files you want to always be normalized and converted
|
||||
# to native line endings on checkout.
|
||||
# *.c text
|
||||
# *.h text
|
||||
|
||||
# Declare files that will always have CRLF line endings on checkout.
|
||||
# Source files
|
||||
# ============
|
||||
@@ -13,6 +15,7 @@
|
||||
*.pyw text eol=crlf
|
||||
*.pyx text eol=crlf
|
||||
pyfa.py text eol=lf
|
||||
|
||||
# Denote all files that are truly binary and should not be modified.
|
||||
# Binary files
|
||||
# ============
|
||||
@@ -22,10 +25,12 @@ pyfa.py text eol=lf
|
||||
*.pyc binary
|
||||
*.pyd binary
|
||||
*.pyo binary
|
||||
|
||||
# Note: .db, .p, and .pkl files are associated
|
||||
# with the python modules ``pickle``, ``dbm.*``,
|
||||
# ``shelve``, ``marshal``, ``anydbm``, & ``bsddb``
|
||||
# (among others).
|
||||
|
||||
# Denote all files that are truly binary and should not be modified.
|
||||
# Image files
|
||||
# ============
|
||||
@@ -33,4 +38,3 @@ pyfa.py text eol=lf
|
||||
*.jpg binary
|
||||
*.icns binary
|
||||
*.ico binary
|
||||
|
||||
|
||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -67,6 +67,7 @@ coverage.xml
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
@@ -90,7 +91,6 @@ target/
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
PyfaEnv/
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
@@ -118,12 +118,4 @@ ENV/
|
||||
.idea
|
||||
eos.iml
|
||||
gitversion
|
||||
.version
|
||||
/.version
|
||||
*.swp
|
||||
|
||||
*.fsdbinary
|
||||
/locale/progress.json
|
||||
|
||||
# vscode settings
|
||||
.vscode
|
||||
14
.mailmap
14
.mailmap
@@ -1,14 +0,0 @@
|
||||
cncfanatics <diego.duclos@gmail.com> cncfanatics <cncfanatics@titanium.(none)>
|
||||
blitzmann <holmes.ryan.90@gmail.com>
|
||||
blitzmann <holmes.ryan.90@gmail.com> blitzmann <ryan.xgamer99@gmail.com>
|
||||
blitzmann <holmes.ryan.90@gmail.com>
|
||||
blitzmann <holmes.ryan.90@gmail.com> blitzman <ryan.xgamer99@gmail.com>
|
||||
blitzmann <holmes.ryan.90@gmail.com> Ryan Holmes <ryan.holmes.90@gmail.com>
|
||||
blitzmann <holmes.ryan.90@gmail.com>
|
||||
Corollax <corollax@gmail.com> Corollax <corollax@corollax-laptop.(none)>
|
||||
Corollax <corollax@gmail.com> Corollax <corollax@corollax-N76VM.(none)>
|
||||
Mr. Nukealizer <mr.nukealizer@gmail.com> Mr. Nukealizer <MrNukealizer@users.noreply.github.com>
|
||||
DarkPhoenix <phoenix@mail.ru>
|
||||
Sakari Orisi <sakari@evefit.org>
|
||||
Will Wykeham <will@wykeham.net> Will Wykeham <will.wykeham@paconsulting.com>
|
||||
OISumeko <camerongrout@gmail.com> OISumeko <cameron@sporadic.co.nz>
|
||||
39
.travis.yml
Normal file
39
.travis.yml
Normal file
@@ -0,0 +1,39 @@
|
||||
language: python
|
||||
cache: pip
|
||||
python:
|
||||
- '2.7'
|
||||
env:
|
||||
- TOXENV=pep8
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
before_install:
|
||||
- sudo apt-get update && sudo apt-get --reinstall install -qq language-pack-en language-pack-ru language-pack-he language-pack-zh-hans
|
||||
- pip install tox
|
||||
# We're not actually installing Tox, but have to run it before we install wxPython via Conda. This is fugly but vOv
|
||||
- tox
|
||||
# get Conda
|
||||
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
|
||||
wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
|
||||
else
|
||||
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
|
||||
fi
|
||||
- bash miniconda.sh -b -p $HOME/miniconda
|
||||
- export PATH="$HOME/miniconda/bin:$PATH"
|
||||
- hash -r
|
||||
- conda config --set always_yes yes --set changeps1 no
|
||||
- conda update -q conda
|
||||
# Useful for debugging any issues with conda
|
||||
- conda info -a
|
||||
install:
|
||||
# install wxPython 3.0.0.0
|
||||
- conda install -c https://conda.anaconda.org/travis wxpython
|
||||
before_script:
|
||||
- pip install -r requirements.txt
|
||||
- pip install -r requirements_test.txt
|
||||
script:
|
||||
- py.test --cov=./
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
before_deploy:
|
||||
- pip install -r requirements_build_linux.txt
|
||||
107
CONTRIBUTING.md
107
CONTRIBUTING.md
@@ -1,107 +0,0 @@
|
||||
# Contribution
|
||||
|
||||
## Requirements
|
||||
|
||||
- Python 3.7
|
||||
- Git CLI installed
|
||||
- Python, pip and git are all available as command-line commands (add to the path if needed)
|
||||
|
||||
Virtual environment will be created in *PyfaEnv* folder. Project will be cloned and run from the *PyfaDEV* folder. Separate virtual environment will be created so required libraries won't clutter the main python installation.
|
||||
|
||||
> Commands and screens were created on Windows 10. Please, update all the paths according to your OS.
|
||||
|
||||
## Setting up the project manually
|
||||
|
||||
Clone the repository
|
||||
```
|
||||
git clone <repo> PyfaDEV
|
||||
```
|
||||
|
||||
Create the virtual environment
|
||||
```
|
||||
python -m venv PyfaEnv
|
||||
```
|
||||
|
||||
Activate the virtual environment
|
||||
|
||||
```
|
||||
For cmd.exe: PyfaEnv\scripts\activate.bat
|
||||
For bash: source <venv>/Scripts/activate
|
||||
```
|
||||
> For other OS check [Python documentation](https://docs.python.org/3/library/venv.html)
|
||||
|
||||
Install requirements for the project from *requirements.txt*
|
||||
```
|
||||
pip install -r PyfaDEV\requirements.txt
|
||||
```
|
||||
> For some Linux distributions, you may need to install separate wxPython bindings, such as `python-matplotlib-wx`
|
||||
|
||||
Check that the libs from *requirements.txt* are installed
|
||||
```
|
||||
pip list
|
||||
```
|
||||
|
||||
Build translations and database:
|
||||
```
|
||||
python scripts\compile_lang.py
|
||||
python db_update.py
|
||||
```
|
||||
|
||||
Test that the project is starting properly
|
||||
```
|
||||
python PyfaDEV\pyfa.py
|
||||
```
|
||||
|
||||
|
||||
## Setting up the project with PyCharm/IntelliJ
|
||||
|
||||
Install PyCharm / Other IntelliJ product with Python plugin
|
||||
|
||||
After launching - select *Check out from Version Control* -> *GIt*
|
||||
|
||||

|
||||
|
||||
Login to GitHub, paste the repo URL and select the folder to which to clone the project into, press *Clone*.
|
||||
|
||||

|
||||
|
||||
After process is complete, open *File* -> *Settings* -> *Project* -> *Project Interpreter*.
|
||||
|
||||

|
||||
|
||||
Press on options and add new virtual environment.
|
||||
|
||||

|
||||
|
||||
Open project tree view and double-click on the *requirements.txt*. Press *Install requirements*. Install all requirements.
|
||||
|
||||

|
||||
|
||||
Create new *Run Configuration*. Set correct *Script path* and *Python interpreter*.
|
||||
|
||||

|
||||
|
||||
Check that the project is starting properly.
|
||||
|
||||
## Running tests
|
||||
|
||||
Switch to the proper virtual environment
|
||||
```
|
||||
For cmd.exe: PyfaEnv\scripts\activate.bat
|
||||
For bash: source <venv>/Scripts/activate
|
||||
```
|
||||
|
||||
Install pytest
|
||||
```
|
||||
pip install pytest
|
||||
```
|
||||
|
||||
Switch to pyfa directory.
|
||||
|
||||
Run tests (any will do)
|
||||
```
|
||||
python -m pytest
|
||||
py.test
|
||||
```
|
||||
|
||||
More information on tests can be found on appropriate [Wiki page](https://github.com/pyfa-org/Pyfa/wiki/Developers:-Writing-Tests-for-Pyfa).
|
||||
56
README.md
56
README.md
@@ -1,58 +1,66 @@
|
||||
# pyfa
|
||||
|
||||
[]([https://travis-ci.org/pyfa-org/Pyfa](https://ci.appveyor.com/project/pyfa-org/pyfa))
|
||||
[](https://pyfainvite.azurewebsites.net/) [](https://travis-ci.org/pyfa-org/Pyfa)
|
||||
|
||||

|
||||

|
||||
|
||||
## What is it?
|
||||
|
||||
Pyfa, short for **py**thon **f**itting **a**ssistant, allows you to create, experiment with, and save ship fittings without being in game. Open source and written in Python, it is available on any platform where Python 3 and wxWidgets are available, including Windows, macOS, and Linux.
|
||||
pyfa, short for **py**thon **f**itting **a**ssistant, allows you to create, experiment with, and save ship fittings without being in game. Open source and written in Python, it is available on any platform where Python 2.x and wxWidgets are available, including Windows, Mac OS X, and Linux.
|
||||
|
||||
## Latest Version and Changelogs
|
||||
The latest version along with release notes can always be found on the project's [releases](https://github.com/pyfa-org/Pyfa/releases) page. Pyfa will notify you if you are running an outdated version.
|
||||
The latest version along with release notes can always be found on the project's [Releases](https://github.com/DarkFenX/Pyfa/releases) page. pyfa will notify you if you are running an outdated version.
|
||||
|
||||
## Installation
|
||||
Windows, macOS, and Linux users are supplied self-contained builds of pyfa on the [latest releases](https://github.com/pyfa-org/Pyfa/releases/latest) page.
|
||||
Windows and OS X users are supplied self-contained builds of pyfa on the [latest releases](https://github.com/pyfa-org/Pyfa/releases/latest) page. An `.exe` installer is also available for Windows builds. Linux users can run pyfa using their distribution's Python interpreter. There is no official self-contained package for Linux, however, there are a number of third-party packages available through distribution-specific repositories.
|
||||
|
||||
### Third Party Packages
|
||||
Please note that these packages are maintained by third-parties and are not evaluated by the pyfa developers.
|
||||
#### OS X
|
||||
There are two different distributives for OS X: `-mac` and `-mac-deprecated`.
|
||||
|
||||
#### macOS
|
||||
Apart from the official release, there is also a [Homebrew](https://formulae.brew.sh/cask/pyfa) option for installing pyfa on macOS. Simply fire up in terminal:
|
||||
* `-mac`: based on wxPython 3.0.2.0 and has updated libraries. This is the recommended build.
|
||||
* `-mac-deprecated`: utilizes older binaries running on wxPython 2.8; because of this, some features are not available (currently CREST support and Attribute Overrides). Additionally, as development happens primarily on wxPython 3.0, a few GUI bugs may pop up as `-mac-deprecated` is not actively tested. However, due to some general issues with wxPython 3.0, especially on some newer OS X versions, `-mac-deprecated` is still offered for those that need it.
|
||||
|
||||
There is also a [Homebrew](http://brew.sh) option for installing pyfa on OS X. Please note this is maintained by a third-party and is not tested by pyfa developers. Simply fire up in terminal:
|
||||
```
|
||||
$ brew install --cask pyfa
|
||||
$ brew install Caskroom/cask/pyfa
|
||||
```
|
||||
|
||||
#### Linux Distro-specific Packages
|
||||
The following is a list of pyfa packages available for certain distributions.
|
||||
### Linux Distro-specific Packages
|
||||
The following is a list of pyfa packages available for certain distributions. Please note that these packages are maintained by third-parties and are not evaluated by the pyfa developers.
|
||||
|
||||
* Debian/Ubuntu/derivitives: https://github.com/AdamMajer/Pyfa/releases
|
||||
* Arch: https://aur.archlinux.org/packages/pyfa/
|
||||
* Gentoo: https://github.com/ZeroPointEnergy/gentoo-pyfa-overlay
|
||||
* openSUSE: https://build.opensuse.org/package/show/home:rmk2/pyfa
|
||||
* FreeBSD: http://www.freshports.org/games/pyfa/ (see [#484](https://github.com/pyfa-org/Pyfa/issues/484) for instructions)
|
||||
|
||||
## Contribution
|
||||
If you wish to help with development or you need to run pyfa through a Python interpreter, check out [the instructions](https://github.com/pyfa-org/Pyfa/blob/master/CONTRIBUTING.md).
|
||||
### Dependencies
|
||||
If you wish to help with development or simply need to run pyfa through a Python interpreter, the following software is required:
|
||||
|
||||
* Python 2.7
|
||||
* `wxPython` 2.8/3.0
|
||||
* `sqlalchemy` >= 1.0.5
|
||||
* `dateutil`
|
||||
* `matplotlib` (for some Linux distributions you may need to install separate wxPython bindings such as `python-matplotlib-wx`)
|
||||
* `requests`
|
||||
* `logbook` >= 1.0.0
|
||||
|
||||
## Bug Reporting
|
||||
The preferred method of reporting bugs is through the project's [GitHub Issues interface](https://github.com/pyfa-org/Pyfa/issues). Alternatively, posting a report in the [pyfa thread](https://forums.eveonline.com/t/27156) on the official EVE Online forums is acceptable. Guidelines for bug reporting can be found on [this wiki page](https://github.com/pyfa-org/Pyfa/wiki/Bug-Reporting).
|
||||
The preferred method of reporting bugs is through the project's [GitHub Issues interface](https://github.com/pyfa-org/Pyfa/issues). Alternatively, posting a report in the [pyfa thread](http://forums.eveonline.com/default.aspx?g=posts&t=247609) on the official EVE Online forums is acceptable. Guidelines for bug reporting can be found on [this wiki page](https://github.com/DarkFenX/Pyfa/wiki/Bug-Reporting).
|
||||
|
||||
## License
|
||||
Pyfa is licensed under the GNU GPL v3.0, see LICENSE
|
||||
pyfa is licensed under the GNU GPL v3.0, see LICENSE
|
||||
|
||||
## Resources
|
||||
* [Development repository](https://github.com/pyfa-org/Pyfa)
|
||||
* Development repository: [https://github.com/pyfa-org/Pyfa](https://github.com/pyfa-org/Pyfa)
|
||||
* [EVE forum thread](https://forums.eveonline.com/t/27156)
|
||||
* [EVE University guide using pyfa](https://wiki.eveuniversity.org/PYFA)
|
||||
* [EVE University guide using pyfa](http://wiki.eveuniversity.org/Guide_to_using_PYFA)
|
||||
* [EVE Online website](http://www.eveonline.com/)
|
||||
|
||||
## Contacts:
|
||||
* Kadesh / DarkPhoenix
|
||||
* GitHub: @DarkFenX
|
||||
* EVE: Kadesh Priestess
|
||||
* Email: phoenix@mail.ru
|
||||
* Sable Blitzmann
|
||||
* GitHub: @blitzmann
|
||||
* [TweetFleet Slack](https://www.fuzzwork.co.uk/tweetfleet-slack-invites/): @blitzmann
|
||||
* [Gitter chat](https://gitter.im/pyfa-org/Pyfa): @blitzmann
|
||||
* [Gitter chat](https://gitter.im/pyfa-org/Pyfa): @ blitzmann
|
||||
* Email: sable.blitzmann@gmail.com
|
||||
|
||||
## CCP Copyright Notice
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
<option name="RIGHT_MARGIN" value="165" />
|
||||
<Python>
|
||||
<option name="NEW_LINE_AFTER_COLON" value="true" />
|
||||
<option name="DICT_ALIGNMENT" value="2" />
|
||||
<option name="DICT_NEW_LINE_AFTER_LEFT_BRACE" value="true" />
|
||||
<option name="DICT_NEW_LINE_BEFORE_RIGHT_BRACE" value="true" />
|
||||
<option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true" />
|
||||
|
||||
@@ -1,61 +1,54 @@
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Pyfa" />
|
||||
<inspection_tool class="IgnoreUnusedEntry" enabled="false" level="UNUSED ENTRY" enabled_by_default="false" />
|
||||
<inspection_tool class="InconsistentLineSeparators" enabled="true" level="ERROR" enabled_by_default="true" />
|
||||
<inspection_tool class="ProblematicWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyBehaveInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyClassicStyleClassInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||
<option name="ourVersions">
|
||||
<value>
|
||||
<list size="1">
|
||||
<item index="0" class="java.lang.String" itemvalue="2.7" />
|
||||
</list>
|
||||
</value>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyMissingTypeHintsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||
<option name="ignoredPackages">
|
||||
<value>
|
||||
<list size="1">
|
||||
<item index="0" class="java.lang.String" itemvalue="wxPython" />
|
||||
</list>
|
||||
</value>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyPep8Inspection" enabled="true" level="TYPO" enabled_by_default="true">
|
||||
<option name="ignoredErrors">
|
||||
<list>
|
||||
<option value="E203" />
|
||||
<option value="E127" />
|
||||
<option value="E128" />
|
||||
<option value="E126" />
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Pyfa" />
|
||||
<inspection_tool class="IgnoreUnusedEntry" enabled="false" level="UNUSED ENTRY" enabled_by_default="false" />
|
||||
<inspection_tool class="InconsistentLineSeparators" enabled="true" level="ERROR" enabled_by_default="true" />
|
||||
<inspection_tool class="ProblematicWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyBehaveInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyClassicStyleClassInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||
<option name="ourVersions">
|
||||
<value>
|
||||
<list size="1">
|
||||
<item index="0" class="java.lang.String" itemvalue="2.7" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="TYPO" enabled_by_default="true">
|
||||
<option name="ignoredErrors">
|
||||
<list>
|
||||
<option value="N802" />
|
||||
<option value="N806" />
|
||||
<option value="N803" />
|
||||
<option value="N814" />
|
||||
</value>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyMissingTypeHintsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||
<option name="ignoredPackages">
|
||||
<value>
|
||||
<list size="1">
|
||||
<item index="0" class="java.lang.String" itemvalue="wxPython" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyShadowingBuiltinsInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyShadowingNamesInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||
<option name="ignoredIdentifiers">
|
||||
<list>
|
||||
<option value="_" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
|
||||
<option name="processCode" value="true" />
|
||||
<option name="processLiterals" value="true" />
|
||||
<option name="processComments" value="true" />
|
||||
</inspection_tool>
|
||||
</profile>
|
||||
</value>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyPep8Inspection" enabled="true" level="TYPO" enabled_by_default="true">
|
||||
<option name="ignoredErrors">
|
||||
<list>
|
||||
<option value="E203" />
|
||||
<option value="E127" />
|
||||
<option value="E128" />
|
||||
<option value="E126" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="TYPO" enabled_by_default="true">
|
||||
<option name="ignoredErrors">
|
||||
<list>
|
||||
<option value="N802" />
|
||||
<option value="N806" />
|
||||
<option value="N803" />
|
||||
<option value="N814" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyShadowingBuiltinsInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="PyShadowingNamesInspection" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
|
||||
<option name="processCode" value="true" />
|
||||
<option name="processLiterals" value="true" />
|
||||
<option name="processComments" value="true" />
|
||||
</inspection_tool>
|
||||
</profile>
|
||||
|
||||
@@ -28,7 +28,7 @@ def DBInMemory_test():
|
||||
gamedataCache = True
|
||||
saveddataCache = True
|
||||
gamedata_version = ""
|
||||
gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(str(__file__))), "..", "eve.db"))
|
||||
gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(unicode(__file__))), "..", "eve.db"))
|
||||
saveddata_connectionstring = 'sqlite:///:memory:'
|
||||
|
||||
class ReadOnlyException(Exception):
|
||||
@@ -49,8 +49,6 @@ def DBInMemory_test():
|
||||
gamedata_version = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
|
||||
).fetchone()[0]
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except Exception as e:
|
||||
print("Missing gamedata version.")
|
||||
gamedata_version = None
|
||||
@@ -74,7 +72,7 @@ def DBInMemory_test():
|
||||
# noinspection PyPep8
|
||||
#from eos.db.gamedata import alphaClones, attribute, category, effect, group, icon, item, marketGroup, metaData, metaGroup, queries, traits, unit
|
||||
# noinspection PyPep8
|
||||
#from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, miscData, module, override, price, queries, skill, targetProfile, user
|
||||
#from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, miscData, module, override, price, queries, skill, targetResists, user
|
||||
|
||||
# If using in memory saveddata, you'll want to reflect it so the data structure is good.
|
||||
if saveddata_connectionstring == "sqlite:///:memory:":
|
||||
@@ -131,8 +129,7 @@ def Saveddata():
|
||||
from eos.saveddata.ship import Ship
|
||||
from eos.saveddata.fit import Fit
|
||||
from eos.saveddata.character import Character
|
||||
from eos.saveddata.module import Module
|
||||
from eos.const import FittingModuleState
|
||||
from eos.saveddata.module import Module, State
|
||||
from eos.saveddata.citadel import Citadel
|
||||
from eos.saveddata.booster import Booster
|
||||
|
||||
@@ -142,7 +139,7 @@ def Saveddata():
|
||||
'Fit' : Fit,
|
||||
'Character': Character,
|
||||
'Module' : Module,
|
||||
'State' : FittingModuleState,
|
||||
'State' : State,
|
||||
'Booster' : Booster,
|
||||
}
|
||||
return helper
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import pytest
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata
|
||||
|
||||
|
||||
# noinspection PyShadowingNames
|
||||
@@ -40,7 +41,7 @@ def CurseFit(DB, Gamedata, Saveddata):
|
||||
mod.state = Saveddata['State'].ONLINE
|
||||
|
||||
# Add 5 neuts
|
||||
for _ in range(5):
|
||||
for _ in xrange(5):
|
||||
fit.modules.append(mod)
|
||||
|
||||
return fit
|
||||
@@ -59,7 +60,7 @@ def HeronFit(DB, Gamedata, Saveddata):
|
||||
mod.state = Saveddata['State'].ONLINE
|
||||
|
||||
# Add 5 neuts
|
||||
for _ in range(4):
|
||||
for _ in xrange(4):
|
||||
fit.modules.append(mod)
|
||||
|
||||
return fit
|
||||
return fit
|
||||
@@ -1,6 +1,7 @@
|
||||
import pytest
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata
|
||||
|
||||
|
||||
# noinspection PyShadowingNames
|
||||
|
||||
@@ -94,8 +94,8 @@ def GetUnicodePath(root, file=None, codec=None):
|
||||
path = os.path.join(path, file)
|
||||
|
||||
if codec:
|
||||
path = str(path, codec)
|
||||
path = unicode(path, codec)
|
||||
else:
|
||||
path = str(path)
|
||||
path = unicode(path)
|
||||
|
||||
return path
|
||||
|
||||
195
config.py
195
config.py
@@ -1,14 +1,7 @@
|
||||
import os
|
||||
import sys
|
||||
import yaml
|
||||
import wx
|
||||
|
||||
from logbook import CRITICAL, DEBUG, ERROR, FingersCrossedHandler, INFO, Logger, NestedSetup, NullHandler, \
|
||||
StreamHandler, TimedRotatingFileHandler, WARNING
|
||||
import hashlib
|
||||
from eos.const import FittingSlot
|
||||
|
||||
from cryptography.fernet import Fernet
|
||||
from logbook import Logger
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
@@ -25,49 +18,18 @@ debug = False
|
||||
# Defines if our saveddata will be in pyfa root or not
|
||||
saveInRoot = False
|
||||
|
||||
# Version data
|
||||
version = "1.37.0"
|
||||
tag = "Stable"
|
||||
expansionName = "YC120.3"
|
||||
expansionVersion = "1.8"
|
||||
evemonMinVersion = "4081"
|
||||
|
||||
minItemSearchLength = 3
|
||||
minItemSearchLengthCjk = 1
|
||||
|
||||
pyfaPath = None
|
||||
savePath = None
|
||||
saveDB = None
|
||||
gameDB = None
|
||||
imgsZIP = None
|
||||
logPath = None
|
||||
loggingLevel = None
|
||||
logging_setup = None
|
||||
cipher = None
|
||||
clientHash = None
|
||||
experimentalFeatures = None
|
||||
version = None
|
||||
language = None
|
||||
|
||||
API_CLIENT_ID = '095d8cd841ac40b581330919b49fe746'
|
||||
ESI_CACHE = 'esi_cache'
|
||||
SSO_CALLBACK = 'https://pyfa-org.github.io/Pyfa/callback'
|
||||
|
||||
LOGLEVEL_MAP = {
|
||||
"critical": CRITICAL,
|
||||
"error": ERROR,
|
||||
"warning": WARNING,
|
||||
"info": INFO,
|
||||
"debug": DEBUG,
|
||||
}
|
||||
|
||||
CATALOG = 'lang'
|
||||
|
||||
slotColourMap = {
|
||||
FittingSlot.LOW: wx.Colour(250, 235, 204), # yellow = low slots
|
||||
FittingSlot.MED: wx.Colour(188, 215, 241), # blue = mid slots
|
||||
FittingSlot.HIGH: wx.Colour(235, 204, 209), # red = high slots
|
||||
FittingSlot.RIG: '',
|
||||
FittingSlot.SUBSYSTEM: ''
|
||||
}
|
||||
|
||||
def getClientSecret():
|
||||
return clientHash
|
||||
|
||||
|
||||
def isFrozen():
|
||||
@@ -83,36 +45,23 @@ def __createDirs(path):
|
||||
|
||||
|
||||
def getPyfaRoot():
|
||||
if hasattr(sys, '_MEIPASS'):
|
||||
return sys._MEIPASS
|
||||
base = getattr(sys.modules['__main__'], "__file__", sys.executable) if isFrozen() else __file__
|
||||
base = getattr(sys.modules['__main__'], "__file__", sys.executable) if isFrozen() else sys.argv[0]
|
||||
root = os.path.dirname(os.path.realpath(os.path.abspath(base)))
|
||||
root = root
|
||||
root = unicode(root, sys.getfilesystemencoding())
|
||||
return root
|
||||
|
||||
|
||||
def getVersion():
|
||||
return version
|
||||
|
||||
|
||||
def getDefaultSave():
|
||||
return os.path.expanduser(os.path.join("~", ".pyfa"))
|
||||
return unicode(os.path.expanduser(os.path.join("~", ".pyfa")), sys.getfilesystemencoding())
|
||||
|
||||
|
||||
def defPaths(customSavePath=None):
|
||||
def defPaths(customSavePath):
|
||||
global debug
|
||||
global pyfaPath
|
||||
global savePath
|
||||
global saveDB
|
||||
global gameDB
|
||||
global imgsZIP
|
||||
global saveInRoot
|
||||
global logPath
|
||||
global cipher
|
||||
global clientHash
|
||||
global version
|
||||
global experimentalFeatures
|
||||
global language
|
||||
|
||||
pyfalog.debug("Configuring Pyfa")
|
||||
|
||||
@@ -122,12 +71,6 @@ def defPaths(customSavePath=None):
|
||||
if pyfaPath is None:
|
||||
pyfaPath = getPyfaRoot()
|
||||
|
||||
# Version data
|
||||
|
||||
with open(os.path.join(pyfaPath, "version.yml"), 'r') as file:
|
||||
data = yaml.load(file, Loader=yaml.SafeLoader)
|
||||
version = data['version']
|
||||
|
||||
# Where we store the saved fits etc, default is the current users home directory
|
||||
if saveInRoot is True:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
@@ -143,19 +86,9 @@ def defPaths(customSavePath=None):
|
||||
|
||||
__createDirs(savePath)
|
||||
|
||||
secret_file = os.path.join(savePath, ".secret")
|
||||
if not os.path.exists(secret_file):
|
||||
with open(secret_file, "wb") as _file:
|
||||
_file.write(Fernet.generate_key())
|
||||
|
||||
with open(secret_file, 'rb') as fp:
|
||||
key = fp.read()
|
||||
clientHash = hashlib.sha3_256(key).hexdigest()
|
||||
cipher = Fernet(key)
|
||||
|
||||
# if isFrozen():
|
||||
# os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(pyfaPath, "cacert.pem")
|
||||
# os.environ["SSL_CERT_FILE"] = os.path.join(pyfaPath, "cacert.pem")
|
||||
if isFrozen():
|
||||
os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(pyfaPath, "cacert.pem").encode('utf8')
|
||||
os.environ["SSL_CERT_FILE"] = os.path.join(pyfaPath, "cacert.pem").encode('utf8')
|
||||
|
||||
# The database where we store all the fits etc
|
||||
saveDB = os.path.join(savePath, "saveddata.db")
|
||||
@@ -167,21 +100,6 @@ def defPaths(customSavePath=None):
|
||||
if not gameDB:
|
||||
gameDB = os.path.join(pyfaPath, "eve.db")
|
||||
|
||||
imgsZIP = getattr(configforced, "imgsZIP", imgsZIP)
|
||||
if not imgsZIP:
|
||||
imgsZIP = os.path.join(pyfaPath, "imgs.zip")
|
||||
|
||||
if debug:
|
||||
logFile = "pyfa_debug.log"
|
||||
else:
|
||||
logFile = "pyfa.log"
|
||||
|
||||
logPath = os.path.join(savePath, logFile)
|
||||
|
||||
experimentalFeatures = getattr(configforced, "experimentalFeatures", experimentalFeatures)
|
||||
if experimentalFeatures is None:
|
||||
experimentalFeatures = False
|
||||
|
||||
# DON'T MODIFY ANYTHING BELOW
|
||||
import eos.config
|
||||
|
||||
@@ -192,90 +110,5 @@ def defPaths(customSavePath=None):
|
||||
eos.config.gamedata_connectionstring = "sqlite:///" + gameDB + "?check_same_thread=False"
|
||||
|
||||
# initialize the settings
|
||||
from service.settings import EOSSettings, LocaleSettings
|
||||
from service.settings import EOSSettings
|
||||
eos.config.settings = EOSSettings.getInstance().EOSSettings # this is kind of confusing, but whatever
|
||||
|
||||
# set langauge, taking the passed argument or falling back to what's saved in the settings
|
||||
localeSettings = LocaleSettings.getInstance()
|
||||
language = language or localeSettings.get('locale')
|
||||
|
||||
# sets the lang for eos, using the mapped langauge.
|
||||
eos.config.set_lang(localeSettings.get_eos_locale())
|
||||
|
||||
def defLogging():
|
||||
global debug
|
||||
global logPath
|
||||
global loggingLevel
|
||||
global logging_setup
|
||||
|
||||
try:
|
||||
if debug:
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
StreamHandler(
|
||||
sys.stdout,
|
||||
bubble=False,
|
||||
level=loggingLevel
|
||||
),
|
||||
TimedRotatingFileHandler(
|
||||
logPath,
|
||||
level=0,
|
||||
backup_count=3,
|
||||
bubble=True,
|
||||
date_format='%Y-%m-%d',
|
||||
),
|
||||
])
|
||||
else:
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
FingersCrossedHandler(
|
||||
TimedRotatingFileHandler(
|
||||
logPath,
|
||||
level=0,
|
||||
backup_count=3,
|
||||
bubble=False,
|
||||
date_format='%Y-%m-%d',
|
||||
),
|
||||
action_level=ERROR,
|
||||
buffer_size=1000,
|
||||
# pull_information=True,
|
||||
# reset=False,
|
||||
)
|
||||
])
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
print("Critical error attempting to setup logging. Falling back to console only.")
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
StreamHandler(
|
||||
sys.stdout,
|
||||
bubble=False
|
||||
)
|
||||
])
|
||||
|
||||
|
||||
class LoggerWriter:
|
||||
def __init__(self, level):
|
||||
# self.level is really like using log.debug(message)
|
||||
# at least in my case
|
||||
self.level = level
|
||||
|
||||
def write(self, message):
|
||||
# if statement reduces the amount of newlines that are
|
||||
# printed to the logger
|
||||
if message.strip() != '':
|
||||
self.level(message.replace("\n", ""))
|
||||
|
||||
def flush(self):
|
||||
# create a flush method so things can be flushed when
|
||||
# the system wants to. Not sure if simply 'printing'
|
||||
# sys.stderr is the correct way to do it, but it seemed
|
||||
# to work properly for me.
|
||||
self.level(sys.stderr)
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
files:
|
||||
- source: /locale/*.pot
|
||||
translation: /locale/%locale_with_underscore%/LC_MESSAGES/lang.po
|
||||
807
db_update.py
807
db_update.py
@@ -1,807 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#======================================================================
|
||||
# Copyright (C) 2012 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of
|
||||
# the License, or (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
#======================================================================
|
||||
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
import sqlalchemy.orm
|
||||
from sqlalchemy import or_, and_
|
||||
|
||||
|
||||
# todo: need to set the EOS language to en, becasuse this assumes it's being run within an English context
|
||||
# Need to know what that would do if called from pyfa
|
||||
ROOT_DIR = os.path.realpath(os.path.dirname(__file__))
|
||||
DB_PATH = os.path.join(ROOT_DIR, 'eve.db')
|
||||
JSON_DIR = os.path.join(ROOT_DIR, 'staticdata')
|
||||
if ROOT_DIR not in sys.path:
|
||||
sys.path.insert(0, ROOT_DIR)
|
||||
GAMEDATA_SCHEMA_VERSION = 4
|
||||
|
||||
|
||||
def db_needs_update():
|
||||
"""True if needs, false if it does not, none if we cannot check it."""
|
||||
try:
|
||||
with open(os.path.join(JSON_DIR, 'phobos', 'metadata.0.json')) as f:
|
||||
data_version = next((r['field_value'] for r in json.load(f) if r['field_name'] == 'client_build'))
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
# If we have no source data - return None; should not update in this case
|
||||
except:
|
||||
return None
|
||||
if not os.path.isfile(DB_PATH):
|
||||
print('Gamedata DB not found')
|
||||
return True
|
||||
db_data_version = None
|
||||
db_schema_version = None
|
||||
try:
|
||||
db = sqlite3.connect(DB_PATH)
|
||||
cursor = db.cursor()
|
||||
cursor.execute('SELECT field_value FROM metadata WHERE field_name = \'client_build\'')
|
||||
for row in cursor:
|
||||
db_data_version = int(row[0])
|
||||
cursor.execute('SELECT field_value FROM metadata WHERE field_name = \'schema_version\'')
|
||||
for row in cursor:
|
||||
db_schema_version = int(row[0])
|
||||
cursor.close()
|
||||
db.close()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
print('Error when fetching gamedata DB metadata')
|
||||
return True
|
||||
if data_version != db_data_version:
|
||||
print('Gamedata DB data version mismatch: needed {}, DB has {}'.format(data_version, db_data_version))
|
||||
return True
|
||||
if GAMEDATA_SCHEMA_VERSION != db_schema_version:
|
||||
print('Gamedata DB schema version mismatch: needed {}, DB has {}'.format(GAMEDATA_SCHEMA_VERSION, db_schema_version))
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def update_db():
|
||||
|
||||
print('Building gamedata DB...')
|
||||
|
||||
if os.path.isfile(DB_PATH):
|
||||
os.remove(DB_PATH)
|
||||
|
||||
import eos.db
|
||||
import eos.gamedata
|
||||
import eos.config
|
||||
|
||||
# Create the database tables
|
||||
eos.db.gamedata_meta.create_all()
|
||||
|
||||
def _readData(minerName, jsonName, keyIdName=None):
|
||||
compiled_data = None
|
||||
for i in itertools.count(0):
|
||||
try:
|
||||
with open(os.path.join(JSON_DIR, minerName, '{}.{}.json'.format(jsonName, i)), encoding='utf-8') as f:
|
||||
rawData = json.load(f)
|
||||
if i == 0:
|
||||
compiled_data = {} if type(rawData) == dict else []
|
||||
if type(rawData) == dict:
|
||||
compiled_data.update(rawData)
|
||||
else:
|
||||
compiled_data.extend(rawData)
|
||||
except FileNotFoundError:
|
||||
break
|
||||
|
||||
if not keyIdName:
|
||||
return compiled_data
|
||||
# IDs in keys, rows in values
|
||||
data = []
|
||||
for k, v in compiled_data.items():
|
||||
row = {}
|
||||
row.update(v)
|
||||
row[keyIdName] = int(k)
|
||||
data.append(row)
|
||||
return data
|
||||
|
||||
def _addRows(data, cls, fieldMap=None):
|
||||
if fieldMap is None:
|
||||
fieldMap = {}
|
||||
for row in data:
|
||||
instance = cls()
|
||||
for k, v in row.items():
|
||||
if isinstance(v, str):
|
||||
v = v.strip()
|
||||
setattr(instance, fieldMap.get(k, k), v)
|
||||
eos.db.gamedata_session.add(instance)
|
||||
|
||||
def processEveTypes():
|
||||
print('processing evetypes')
|
||||
data = _readData('fsd_binary', 'types', keyIdName='typeID')
|
||||
for row in data:
|
||||
if (
|
||||
# Apparently people really want Civilian modules available
|
||||
(row['typeName_en-us'].startswith('Civilian') and "Shuttle" not in row['typeName_en-us'])
|
||||
or row['typeName_en-us'] == 'Capsule'
|
||||
or row['groupID'] == 4033 # destructible effect beacons
|
||||
or re.match('AIR .+Booster.*', row['typeName_en-us'])
|
||||
):
|
||||
row['published'] = True
|
||||
# Nearly useless and clutter search results too much
|
||||
elif (
|
||||
row['typeName_en-us'].startswith('Limited Synth ')
|
||||
or row['typeName_en-us'].startswith('Expired ')
|
||||
or re.match('Mining Blitz .+ Booster Dose .+', row['typeName_en-us'])
|
||||
or row['typeName_en-us'].endswith(' Filament') and (
|
||||
"'Needlejack'" not in row['typeName_en-us'] and
|
||||
"'Devana'" not in row['typeName_en-us'] and
|
||||
"'Pochven'" not in row['typeName_en-us'] and
|
||||
"'Extraction'" not in row['typeName_en-us'] and
|
||||
"'Krai Veles'" not in row['typeName_en-us'] and
|
||||
"'Krai Perun'" not in row['typeName_en-us'] and
|
||||
"'Krai Svarog'" not in row['typeName_en-us']
|
||||
)
|
||||
):
|
||||
row['published'] = False
|
||||
|
||||
newData = []
|
||||
for row in data:
|
||||
if (
|
||||
row['published'] or
|
||||
# group Ship Modifiers, for items like tactical t3 ship modes
|
||||
row['groupID'] == 1306 or
|
||||
# Micro Bombs (Fighters)
|
||||
row['typeID'] in (41549, 41548, 41551, 41550) or
|
||||
# Abyssal weather (environment)
|
||||
row['groupID'] in (
|
||||
1882,
|
||||
1975,
|
||||
1971,
|
||||
1983) # the "container" for the abyssal environments
|
||||
):
|
||||
newData.append(row)
|
||||
map = {'typeName_en-us': 'typeName', 'description_en-us': '_description'}
|
||||
map.update({'description'+v: '_description'+v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
|
||||
_addRows(newData, eos.gamedata.Item, fieldMap=map)
|
||||
return newData
|
||||
|
||||
def processEveGroups():
|
||||
print('processing evegroups')
|
||||
data = _readData('fsd_binary', 'groups', keyIdName='groupID')
|
||||
map = {'groupName_en-us': 'name'}
|
||||
map.update({'groupName'+v: 'name'+v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
|
||||
_addRows(data, eos.gamedata.Group, fieldMap=map)
|
||||
return data
|
||||
|
||||
def processEveCategories():
|
||||
print('processing evecategories')
|
||||
data = _readData('fsd_binary', 'categories', keyIdName='categoryID')
|
||||
map = { 'categoryName_en-us': 'name' }
|
||||
map.update({'categoryName'+v: 'name'+v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
|
||||
_addRows(data, eos.gamedata.Category, fieldMap=map)
|
||||
|
||||
def processDogmaAttributes():
|
||||
print('processing dogmaattributes')
|
||||
data = _readData('fsd_binary', 'dogmaattributes', keyIdName='attributeID')
|
||||
map = {
|
||||
'displayName_en-us': 'displayName',
|
||||
# 'tooltipDescription_en-us': 'tooltipDescription'
|
||||
}
|
||||
_addRows(data, eos.gamedata.AttributeInfo, fieldMap=map)
|
||||
|
||||
    def processDogmaTypeAttributes(eveTypesData):
        print('processing dogmatypeattributes')
        data = _readData('fsd_binary', 'typedogma', keyIdName='typeID')
        eveTypeIds = set(r['typeID'] for r in eveTypesData)
        newData = []
        seenKeys = set()

        def checkKey(key):
            if key in seenKeys:
                return False
            seenKeys.add(key)
            return True

        for typeData in data:
            if typeData['typeID'] not in eveTypeIds:
                continue
            for row in typeData.get('dogmaAttributes', ()):
                row['typeID'] = typeData['typeID']
                if checkKey((row['typeID'], row['attributeID'])):
                    newData.append(row)
        for row in eveTypesData:
            for attrId, attrName in {4: 'mass', 38: 'capacity', 161: 'volume', 162: 'radius'}.items():
                if attrName in row and checkKey((row['typeID'], attrId)):
                    newData.append({'typeID': row['typeID'], 'attributeID': attrId, 'value': row[attrName]})

        _addRows(newData, eos.gamedata.Attribute)
        return newData

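    # --- Editor's note: hypothetical helper, not in the original file. ---
    # processDogmaTypeAttributes keeps only the first value seen for each
    # (typeID, attributeID) pair, then backfills mass/capacity/volume/radius from
    # the evetypes rows under their hardcoded attribute IDs. The same first-wins
    # dedup idiom can be expressed generically:
    def _firstWins(rows, keyFunc):
        seen = set()
        for row in rows:
            key = keyFunc(row)
            if key not in seen:
                seen.add(key)
                yield row
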
    def processDynamicItemAttributes():
        print('processing dynamicitemattributes')
        data = _readData('fsd_binary', 'dynamicitemattributes')
        for mutaID, mutaData in data.items():
            muta = eos.gamedata.DynamicItem()
            muta.typeID = mutaID
            muta.resultingTypeID = mutaData['inputOutputMapping'][0]['resultingType']
            eos.db.gamedata_session.add(muta)

            for x in mutaData['inputOutputMapping'][0]['applicableTypes']:
                item = eos.gamedata.DynamicItemItem()
                item.typeID = mutaID
                item.applicableTypeID = x
                eos.db.gamedata_session.add(item)

            for attrID, attrData in mutaData['attributeIDs'].items():
                attr = eos.gamedata.DynamicItemAttribute()
                attr.typeID = mutaID
                attr.attributeID = attrID
                attr.min = attrData['min']
                attr.max = attrData['max']
                eos.db.gamedata_session.add(attr)

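    # --- Editor's note: assumed data shape, for orientation only; values invented. ---
    # processDynamicItemAttributes above expects each mutaplasmid entry to look
    # roughly like this (key names taken from the code, IDs and numbers made up):
    # {
    #     'inputOutputMapping': [{'resultingType': 47408, 'applicableTypes': [5973, 5975]}],
    #     'attributeIDs': {20: {'min': 0.9, 'max': 1.1}, 554: {'min': 0.95, 'max': 1.05}},
    # }
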
    def processDogmaEffects():
        print('processing dogmaeffects')
        data = _readData('fsd_binary', 'dogmaeffects', keyIdName='effectID')
        _addRows(data, eos.gamedata.Effect, fieldMap={'resistanceAttributeID': 'resistanceID'})

    def processDogmaTypeEffects(eveTypesData):
        print('processing dogmatypeeffects')
        data = _readData('fsd_binary', 'typedogma', keyIdName='typeID')
        eveTypeIds = set(r['typeID'] for r in eveTypesData)
        newData = []
        for typeData in data:
            if typeData['typeID'] not in eveTypeIds:
                continue
            for row in typeData.get('dogmaEffects', ()):
                row['typeID'] = typeData['typeID']
                newData.append(row)
        _addRows(newData, eos.gamedata.ItemEffect)
        return newData

    def processDogmaUnits():
        print('processing dogmaunits')
        data = _readData('fsd_binary', 'dogmaunits', keyIdName='unitID')
        _addRows(data, eos.gamedata.Unit, fieldMap={
            'name': 'unitName',
            'displayName_en-us': 'displayName'
        })

    def processMarketGroups():
        print('processing marketgroups')
        data = _readData('fsd_binary', 'marketgroups', keyIdName='marketGroupID')
        map = {
            'name_en-us': 'marketGroupName',
            'description_en-us': '_description',
        }
        map.update({'name'+v: 'marketGroupName'+v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
        map.update({'description' + v: '_description' + v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
        _addRows(data, eos.gamedata.MarketGroup, fieldMap=map)

    def processMetaGroups():
        print('processing metagroups')
        data = _readData('fsd_binary', 'metagroups', keyIdName='metaGroupID')
        map = {'name_en-us': 'metaGroupName'}
        map.update({'name' + v: 'metaGroupName' + v for (k, v) in eos.config.translation_mapping.items() if k != 'en'})
        _addRows(data, eos.gamedata.MetaGroup, fieldMap=map)

    def processCloneGrades():
        print('processing clonegrades')
        data = _readData('fsd_lite', 'clonegrades')

        newData = []
        # December, 2017 - CCP decided to use only one set of skill levels for alpha clones. However, this is still
        # represented in the data as a skillset per race. To ensure that all skills are the same, we store them in a way
        # that we can check to make sure all races have the same skills, as well as skill levels
        check = {}
        for ID in data:
            for skill in data[ID]['skills']:
                newData.append({
                    'alphaCloneID': int(ID),
                    'alphaCloneName': 'Alpha Clone',
                    'typeID': skill['typeID'],
                    'level': skill['level']})
                if ID not in check:
                    check[ID] = {}
                check[ID][int(skill['typeID'])] = int(skill['level'])
        if not functools.reduce(lambda a, b: a if a == b else False, [v for _, v in check.items()]):
            raise Exception('Alpha Clones not all equal')
        newData = [x for x in newData if x['alphaCloneID'] == 1]
        if len(newData) == 0:
            raise Exception('Alpha Clone processing failed')

        tmp = []
        for row in newData:
            if row['alphaCloneID'] not in tmp:
                cloneParent = eos.gamedata.AlphaClone()
                setattr(cloneParent, 'alphaCloneID', row['alphaCloneID'])
                setattr(cloneParent, 'alphaCloneName', row['alphaCloneName'])
                eos.db.gamedata_session.add(cloneParent)
                tmp.append(row['alphaCloneID'])
        _addRows(newData, eos.gamedata.AlphaCloneSkill)

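    # --- Editor's note: equivalent check shown for clarity; not in the original file. ---
    # The functools.reduce call above relies on "a if a == b else False" chaining,
    # which only reports a mismatch because an empty/False accumulator is falsy.
    # A more direct way to assert that every race carries an identical alpha skill
    # set would be:
    def _allClonesEqual(check):
        skillSets = list(check.values())
        return all(s == skillSets[0] for s in skillSets[1:])
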
    def processTraits():
        print('processing traits')
        data = _readData('phobos', 'traits')

        def convertSection(sectionData):
            sectionLines = []
            headerText = '<b>{}</b>'.format(sectionData['header'])
            sectionLines.append(headerText)
            for bonusData in sectionData['bonuses']:
                prefix = '{} '.format(bonusData['number']) if 'number' in bonusData else ''
                bonusText = '{}{}'.format(prefix, bonusData['text'].replace('\u00B7', '\u2022 '))
                sectionLines.append(bonusText)
            sectionLine = '<br />\n'.join(sectionLines)
            return sectionLine

        newData = []
        for row in data:
            try:
                newRow = {
                    'typeID': row['typeID'],
                }
                for (k, v) in eos.config.translation_mapping.items():
                    if v == '':
                        v = '_en-us'
                    typeLines = []
                    traitData = row['traits{}'.format(v)]
                    for skillData in sorted(traitData.get('skills', ()), key=lambda i: i['header']):
                        typeLines.append(convertSection(skillData))
                    if 'role' in traitData:
                        typeLines.append(convertSection(traitData['role']))
                    if 'misc' in traitData:
                        typeLines.append(convertSection(traitData['misc']))
                    traitLine = '<br />\n<br />\n'.join(typeLines)
                    newRow['traitText{}'.format(v)] = traitLine

                newData.append(newRow)
            except:
                pass
        _addRows(newData, eos.gamedata.Traits, fieldMap={'traitText_en-us': 'traitText'})

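    # --- Editor's note: worked example of the trait conversion above; sample values invented. ---
    # Given sectionData like
    #     {'header': 'Assault Frigates bonuses (per skill level):',
    #      'bonuses': [{'number': '5%', 'text': 'bonus to Light Missile damage'}]}
    # convertSection() returns:
    #     '<b>Assault Frigates bonuses (per skill level):</b><br />\n5% bonus to Light Missile damage'
    # and processTraits joins such sections with '<br />\n<br />\n' per language.
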
    def processMetadata():
        print('processing metadata')
        data = _readData('phobos', 'metadata')
        _addRows(data, eos.gamedata.MetaData)

    def processReqSkills(eveTypesData):
        print('processing requiredskillsfortypes')

        def composeReqSkills(raw):
            reqSkills = {}
            for skillTypeID, skillLevel in raw.items():
                reqSkills[int(skillTypeID)] = skillLevel
            return reqSkills

        eveTypeIds = set(r['typeID'] for r in eveTypesData)
        data = _readData('fsd_binary', 'requiredskillsfortypes')
        reqsByItem = {}
        itemsByReq = {}
        for typeID, skillreqData in data.items():
            typeID = int(typeID)
            if typeID not in eveTypeIds:
                continue
            for skillTypeID, skillLevel in composeReqSkills(skillreqData).items():
                reqsByItem.setdefault(typeID, {})[skillTypeID] = skillLevel
                itemsByReq.setdefault(skillTypeID, {})[typeID] = skillLevel
        for item in eos.db.gamedata_session.query(eos.gamedata.Item).all():
            if item.typeID in reqsByItem:
                item.reqskills = json.dumps(reqsByItem[item.typeID])
            if item.typeID in itemsByReq:
                item.requiredfor = json.dumps(itemsByReq[item.typeID])

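    # --- Editor's note: illustrative output shape only; the type IDs are invented. ---
    # After processReqSkills, an item row carries JSON blobs along the lines of:
    #     item.reqskills   == '{"3300": 1, "3301": 3}'   # skills this item requires
    #     item.requiredfor == '{"2048": 4}'              # items requiring this skill
    # Keys are type IDs (stringified by json.dumps), values are skill levels.
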
    def processReplacements(eveTypesData, eveGroupsData, dogmaTypeAttributesData, dogmaTypeEffectsData):
        print('finding item replacements')

        def compareAttrs(attrs1, attrs2):
            # Consider items as different if they have no attrs
            if len(attrs1) == 0 and len(attrs2) == 0:
                return False
            if set(attrs1) != set(attrs2):
                return False
            if all(attrs1[aid] == attrs2[aid] for aid in attrs1):
                return True
            return False

        skillReqAttribs = {
            182: 277,
            183: 278,
            184: 279,
            1285: 1286,
            1289: 1287,
            1290: 1288}
        skillReqAttribsFlat = set(skillReqAttribs.keys()).union(skillReqAttribs.values())
        # Get data on type groups
        # Format: {type ID: group ID}
        typesGroups = {}
        for row in eveTypesData:
            typesGroups[row['typeID']] = row['groupID']
        # Get data on item effects
        # Format: {type ID: set(effect IDs)}
        typesEffects = {}
        for row in dogmaTypeEffectsData:
            typesEffects.setdefault(row['typeID'], set()).add(row['effectID'])
        # Get data on type attributes
        # Format: {type ID: {attribute ID: attribute value}}
        typesNormalAttribs = {}
        typesSkillAttribs = {}
        for row in dogmaTypeAttributesData:
            attributeID = row['attributeID']
            if attributeID in skillReqAttribsFlat:
                typeSkillAttribs = typesSkillAttribs.setdefault(row['typeID'], {})
                typeSkillAttribs[row['attributeID']] = row['value']
            # Ignore these attributes for comparison purposes
            elif attributeID in (
                    # We do not need mass as it affects final ship stats only when carried by ship itself
                    # (and we're not going to replace ships), but it's wildly inconsistent for other items,
                    # which otherwise would be the same
                    4,  # mass
                    124,  # mainColor
                    162,  # radius
                    422,  # techLevel
                    633,  # metaLevel
                    1692,  # metaGroupID
                    1768  # typeColorScheme
            ):
                continue
            else:
                typeNormalAttribs = typesNormalAttribs.setdefault(row['typeID'], {})
                typeNormalAttribs[row['attributeID']] = row['value']
        # Get data on skill requirements
        # Format: {type ID: {skill type ID: skill level}}
        typesSkillReqs = {}
        for typeID, typeAttribs in typesSkillAttribs.items():
            typeSkillAttribs = typesSkillAttribs.get(typeID, {})
            if not typeSkillAttribs:
                continue
            typeSkillReqs = typesSkillReqs.setdefault(typeID, {})
            for skillreqTypeAttr, skillreqLevelAttr in skillReqAttribs.items():
                try:
                    skillType = int(typeSkillAttribs[skillreqTypeAttr])
                    skillLevel = int(typeSkillAttribs[skillreqLevelAttr])
                except (KeyError, ValueError):
                    continue
                typeSkillReqs[skillType] = skillLevel
        # Format: {group ID: category ID}
        groupCategories = {}
        for row in eveGroupsData:
            groupCategories[row['groupID']] = row['categoryID']
        # As EVE affects various types mostly depending on their group or skill requirements,
        # we're going to group various types up this way
        # Format: {(group ID, frozenset(skillreq type IDs), frozenset(type effect IDs)): [(type ID, {attribute ID: attribute value})]}
        groupedData = {}
        for row in eveTypesData:
            typeID = row['typeID']
            # Ignore items outside of categories we need
            if groupCategories[typesGroups[typeID]] not in (
                    6,  # Ship
                    7,  # Module
                    8,  # Charge
                    18,  # Drone
                    20,  # Implant
                    22,  # Deployable
                    23,  # Starbase
                    32,  # Subsystem
                    35,  # Decryptors
                    65,  # Structure
                    66,  # Structure Module
                    87,  # Fighter
            ):
                continue
            typeAttribs = typesNormalAttribs.get(typeID, {})
            # Ignore items w/o attributes
            if not typeAttribs:
                continue
            # We need only skill types, not levels for keys
            typeSkillreqs = frozenset(typesSkillReqs.get(typeID, {}))
            typeGroup = typesGroups[typeID]
            typeEffects = frozenset(typesEffects.get(typeID, ()))
            groupData = groupedData.setdefault((typeGroup, typeSkillreqs, typeEffects), [])
            groupData.append((typeID, typeAttribs))
        # Format: {type ID: set(type IDs)}
        replacements = {}
        # Now, go through composed groups and for every item within it
        # find items which are the same
        for groupData in groupedData.values():
            for type1, type2 in itertools.combinations(groupData, 2):
                if compareAttrs(type1[1], type2[1]):
                    replacements.setdefault(type1[0], set()).add(type2[0])
                    replacements.setdefault(type2[0], set()).add(type1[0])
        # Update DB session with data we generated
        for item in eos.db.gamedata_session.query(eos.gamedata.Item).all():
            itemReplacements = replacements.get(item.typeID)
            if itemReplacements is not None:
                item.replacements = ','.join('{}'.format(tid) for tid in sorted(itemReplacements))

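    # --- Editor's note: toy demonstration of the replacement search; not original code. ---
    # Items are bucketed by (group, skill requirements, effects) and only compared
    # within a bucket, so the pairwise pass stays cheap. On invented data:
    def _demoReplacements():
        bucket = [(1, {10: 5.0, 20: 3.0}), (2, {10: 5.0, 20: 3.0}), (3, {10: 5.0, 20: 4.0})]
        found = {}
        for (id1, attrs1), (id2, attrs2) in itertools.combinations(bucket, 2):
            if attrs1 == attrs2:
                found.setdefault(id1, set()).add(id2)
                found.setdefault(id2, set()).add(id1)
        return found  # {1: {2}, 2: {1}} - type 3 differs in attribute 20
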
    def processImplantSets(eveTypesData):
        print('composing implant sets')
        # Includes only implants which can be considered part of sets, not all implants
        implant_groups = (300, 1730)
        specials = {'Genolution': ('Genolution Core Augmentation', r'CA-\d+')}
        implantSets = {}
        for row in eveTypesData:
            if not row.get('published'):
                continue
            if row.get('groupID') not in implant_groups:
                continue
            typeName = row.get('typeName_en-us', '')
            # Regular sets matching
            m = re.match('(?P<grade>(High|Mid|Low)-grade) (?P<set>\w+) (?P<implant>(Alpha|Beta|Gamma|Delta|Epsilon|Omega))', typeName, re.IGNORECASE)
            if m:
                implantSets.setdefault((m.group('grade'), m.group('set')), set()).add(row['typeID'])
            # Special set matching
            for setHandle, (setName, implantPattern) in specials.items():
                pattern = '(?P<set>{}) (?P<implant>{})'.format(setName, implantPattern)
                m = re.match(pattern, typeName)
                if m:
                    implantSets.setdefault((None, setHandle), set()).add(row['typeID'])
                    break
        data = []
        for (gradeName, setName), implants in implantSets.items():
            if len(implants) < 2:
                continue
            implants = ','.join('{}'.format(tid) for tid in sorted(implants))
            row = {'setName': setName, 'gradeName': gradeName, 'implants': implants}
            data.append(row)
        _addRows(data, eos.gamedata.ImplantSet)

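    # --- Editor's note: worked example of the set-matching regex above. ---
    # 'High-grade Snake Alpha' matches the regular pattern with grade='High-grade'
    # and set='Snake', so it is filed under the ('High-grade', 'Snake') set, while
    # 'Genolution Core Augmentation CA-1' only matches the special Genolution
    # pattern and lands in the (None, 'Genolution') set. Sets that end up with
    # fewer than two published implants are dropped.
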
    eveTypesData = processEveTypes()
    eveGroupsData = processEveGroups()
    processEveCategories()
    processDogmaAttributes()
    dogmaTypeAttributesData = processDogmaTypeAttributes(eveTypesData)
    processDynamicItemAttributes()
    processDogmaEffects()
    dogmaTypeEffectsData = processDogmaTypeEffects(eveTypesData)
    processDogmaUnits()
    processMarketGroups()
    processMetaGroups()
    processCloneGrades()
    processTraits()
    processMetadata()

    eos.db.gamedata_session.flush()
    processReqSkills(eveTypesData)
    processReplacements(eveTypesData, eveGroupsData, dogmaTypeAttributesData, dogmaTypeEffectsData)
    processImplantSets(eveTypesData)

    # Add schema version to prevent further updates
    metadata_schema_version = eos.gamedata.MetaData()
    metadata_schema_version.field_name = 'schema_version'
    metadata_schema_version.field_value = GAMEDATA_SCHEMA_VERSION
    eos.db.gamedata_session.add(metadata_schema_version)

    eos.db.gamedata_session.flush()

    # CCP still has 5 subsystems assigned to T3Cs, even though only 4 are available / usable. They probably have some
    # old legacy requirement or assumption that makes it difficult for them to change this value in the data. But for
    # pyfa, we can do it here as a post-processing step
    for attr in eos.db.gamedata_session.query(eos.gamedata.Attribute).filter(eos.gamedata.Attribute.ID == 1367).all():
        attr.value = 4.0
    for item in eos.db.gamedata_session.query(eos.gamedata.Item).filter(or_(
            eos.gamedata.Item.name.like('%abyssal%'),
            eos.gamedata.Item.name.like('%mutated%'),
            eos.gamedata.Item.name.like('%_PLACEHOLDER%'),
            # Drifter weapons are published for some reason
            eos.gamedata.Item.name.in_(('Lux Kontos', 'Lux Xiphos'))
    )).all():
        if 'Asteroid Mining Crystal' in item.name:
            continue
        if 'Mutated Drone Specialization' in item.name:
            continue
        item.published = False

    for x in [
        30  # Apparel
    ]:
        cat = eos.db.gamedata_session.query(eos.gamedata.Category).filter(eos.gamedata.Category.ID == x).first()
        print('Removing Category: {}'.format(cat.name))
        eos.db.gamedata_session.delete(cat)

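    # --- Editor's note: optional sanity check, hypothetical; not part of the original file. ---
    # After the flush above, the schema version row written earlier can be verified
    # before committing, for example:
    def _verifySchemaVersion():
        row = eos.db.gamedata_session.query(eos.gamedata.MetaData).filter(
            eos.gamedata.MetaData.field_name == 'schema_version').one()
        assert row.field_value == GAMEDATA_SCHEMA_VERSION
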
    # Unused normally, can be useful for customizing items
    def _copyItem(srcName, tgtTypeID, tgtName):
        eveType = eos.db.gamedata_session.query(eos.gamedata.Item).filter(eos.gamedata.Item.name == srcName).one()
        eos.db.gamedata_session.expunge(eveType)
        sqlalchemy.orm.make_transient(eveType)
        eveType.ID = tgtTypeID
        for suffix in eos.config.translation_mapping.values():
            setattr(eveType, f'typeName{suffix}', tgtName)
        eos.db.gamedata_session.add(eveType)
        eos.db.gamedata_session.flush()

    def _hardcodeAttribs(typeID, attrMap):
        for attrName, value in attrMap.items():
            try:
                attr = eos.db.gamedata_session.query(eos.gamedata.Attribute).filter(and_(
                    eos.gamedata.Attribute.name == attrName, eos.gamedata.Attribute.typeID == typeID)).one()
            except sqlalchemy.orm.exc.NoResultFound:
                attrInfo = eos.db.gamedata_session.query(eos.gamedata.AttributeInfo).filter(eos.gamedata.AttributeInfo.name == attrName).one()
                attr = eos.gamedata.Attribute()
                attr.attributeID = attrInfo.ID
                attr.typeID = typeID
                attr.value = value
                eos.db.gamedata_session.add(attr)
            else:
                attr.value = value

    def _hardcodeEffects(typeID, effectMap):
        item = eos.db.gamedata_session.query(eos.gamedata.Item).filter(eos.gamedata.Item.ID == typeID).one()
        item.effects.clear()
        for effectID, effectName in effectMap.items():
            effect = eos.gamedata.Effect()
            effect.effectID = effectID
            effect.effectName = effectName
            item.effects[effectName] = effect

    def hardcodeShapash():
        shapashTypeID = 1000000
        _copyItem(srcName='Utu', tgtTypeID=shapashTypeID, tgtName='Shapash')
        attrMap = {
            # Fitting
            'powerOutput': 50,
            'cpuOutput': 225,
            'capacitorCapacity': 420,
            'rechargeRate': 187500,
            # Slots
            'hiSlots': 3,
            'medSlots': 4,
            'lowSlots': 4,
            'launcherSlotsLeft': 0,
            'turretSlotsLeft': 3,
            # Rigs
            'rigSlots': 2,
            'rigSize': 1,
            'upgradeCapacity': 400,
            # Shield
            'shieldCapacity': 575,
            'shieldRechargeRate': 625000,
            'shieldEmDamageResonance': 1 - 0.0,
            'shieldThermalDamageResonance': 1 - 0.6,
            'shieldKineticDamageResonance': 1 - 0.85,
            'shieldExplosiveDamageResonance': 1 - 0.5,
            # Armor
            'armorHP': 1015,
            'armorEmDamageResonance': 1 - 0.5,
            'armorThermalDamageResonance': 1 - 0.675,
            'armorKineticDamageResonance': 1 - 0.8375,
            'armorExplosiveDamageResonance': 1 - 0.1,
            # Structure
            'hp': 1274,
            'emDamageResonance': 1 - 0.33,
            'thermalDamageResonance': 1 - 0.33,
            'kineticDamageResonance': 1 - 0.33,
            'explosiveDamageResonance': 1 - 0.33,
            'mass': 1215000,
            'volume': 29500,
            'capacity': 165,
            # Navigation
            'maxVelocity': 325,
            'agility': 3.467,
            'warpSpeedMultiplier': 5.5,
            # Drones
            'droneCapacity': 75,
            'droneBandwidth': 25,
            # Targeting
            'maxTargetRange': 49000,
            'maxLockedTargets': 6,
            'scanRadarStrength': 0,
            'scanLadarStrength': 0,
            'scanMagnetometricStrength': 9,
            'scanGravimetricStrength': 0,
            'signatureRadius': 39,
            'scanResolution': 550,
            # Misc
            'energyWarfareResistance': 0,
            'stasisWebifierResistance': 0,
            'weaponDisruptionResistance': 0}
        effectMap = {
            100100: 'pyfaCustomShapashAfArAmount',
            100101: 'pyfaCustomShapashAfShtTrackingOptimal',
            100102: 'pyfaCustomShapashGfShtDamage',
            100103: 'pyfaCustomShapashGfPointRange',
            100104: 'pyfaCustomShapashGfPropOverheat',
            100105: 'pyfaCustomShapashRolePlateMass',
            100106: 'pyfaCustomShapashRoleHeat'}
        _hardcodeAttribs(shapashTypeID, attrMap)
        _hardcodeEffects(shapashTypeID, effectMap)

    def hardcodeCybele():
        cybeleTypeID = 1000001
        _copyItem(srcName='Adrestia', tgtTypeID=cybeleTypeID, tgtName='Cybele')
        attrMap = {
            # Fitting
            'powerOutput': 1284,
            'cpuOutput': 400,
            'capacitorCapacity': 2400,
            'rechargeRate': 334000,
            'hiSlots': 5,
            'medSlots': 4,
            'lowSlots': 6,
            'launcherSlotsLeft': 0,
            'turretSlotsLeft': 5,
            # Rigs
            'rigSlots': 2,
            'rigSize': 2,
            'upgradeCapacity': 400,
            # Shield
            'shieldCapacity': 1200,
            'shieldRechargeRate': 1250000,
            'shieldEmDamageResonance': 1 - 0.0,
            'shieldThermalDamageResonance': 1 - 0.5,
            'shieldKineticDamageResonance': 1 - 0.9,
            'shieldExplosiveDamageResonance': 1 - 0.5,
            # Armor
            'armorHP': 1900,
            'armorEmDamageResonance': 1 - 0.5,
            'armorThermalDamageResonance': 1 - 0.69,
            'armorKineticDamageResonance': 1 - 0.85,
            'armorExplosiveDamageResonance': 1 - 0.1,
            # Structure
            'hp': 2300,
            'emDamageResonance': 1 - 0.33,
            'thermalDamageResonance': 1 - 0.33,
            'kineticDamageResonance': 1 - 0.33,
            'explosiveDamageResonance': 1 - 0.33,
            'mass': 11100000,
            'volume': 112000,
            'capacity': 450,
            # Navigation
            'maxVelocity': 235,
            'agility': 0.457,
            'warpSpeedMultiplier': 4.5,
            # Drones
            'droneCapacity': 100,
            'droneBandwidth': 50,
            # Targeting
            'maxTargetRange': 60000,
            'maxLockedTargets': 6,
            'scanRadarStrength': 0,
            'scanLadarStrength': 0,
            'scanMagnetometricStrength': 15,
            'scanGravimetricStrength': 0,
            'signatureRadius': 115,
            'scanResolution': 330,
            # Misc
            'energyWarfareResistance': 0,
            'stasisWebifierResistance': 0,
            'weaponDisruptionResistance': 0}
        effectMap = {
            100200: 'pyfaCustomCybeleHacMhtFalloff',
            100201: 'pyfaCustomCybeleHacMhtTracking',
            100202: 'pyfaCustomCybeleGcMhtDamage',
            100203: 'pyfaCustomCybeleGcArAmount',
            100204: 'pyfaCustomCybeleGcPointRange',
            100205: 'pyfaCustomCybeleRoleVelocity',
            100206: 'pyfaCustomCybeleRolePlateMass'}
        _hardcodeAttribs(cybeleTypeID, attrMap)
        _hardcodeEffects(cybeleTypeID, effectMap)

    # hardcodeShapash()
    # hardcodeCybele()

    eos.db.gamedata_session.commit()
    eos.db.gamedata_engine.execute('VACUUM')

    print('done')


if __name__ == '__main__':
    update_db()
@@ -1,55 +0,0 @@
|
||||
version: 1
|
||||
|
||||
AppDir:
|
||||
path: ./AppDir
|
||||
|
||||
app_info:
|
||||
id: pyfa
|
||||
name: pyfa
|
||||
icon: pyfa
|
||||
version: '{{PYFA_VERSION}}'
|
||||
exec: usr/bin/python3.11
|
||||
exec_args: "-s $APPDIR/opt/pyfa/pyfa.py $@"
|
||||
|
||||
apt:
|
||||
arch: [ amd64 ]
|
||||
sources:
|
||||
- sourceline: 'deb http://us.archive.ubuntu.com/ubuntu jammy main restricted universe multiverse'
|
||||
key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920d1991bc93c'
|
||||
- sourceline: 'deb http://us.archive.ubuntu.com/ubuntu jammy-updates main restricted universe multiverse'
|
||||
key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920d1991bc93c'
|
||||
- sourceline: 'deb http://us.archive.ubuntu.com/ubuntu jammy-backports main restricted universe multiverse'
|
||||
key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920d1991bc93c'
|
||||
- sourceline: 'deb http://us.archive.ubuntu.com/ubuntu jammy-security main restricted universe multiverse'
|
||||
key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x871920d1991bc93c'
|
||||
- sourceline: 'deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu jammy main'
|
||||
key_url: 'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0xf23c5a6cf475977595c89f51ba6932366a755776'
|
||||
|
||||
include:
|
||||
- python3.11
|
||||
# wx dependencies
|
||||
- libgtk-3-0
|
||||
- libwebkit2gtk-4.0-37 # Needed for wx's HTML lib
|
||||
- librsvg2-common # Without it, search in char editor crashes due to inability to load some images
|
||||
exclude: []
|
||||
|
||||
after_bundle:
|
||||
# Install python dependencies to bundled interpreter
|
||||
- export PYTHONHOME="AppDir/usr"
|
||||
- export PYTHONPATH="AppDir/usr/lib/python3.11/site-packages"
|
||||
- curl -L https://bootstrap.pypa.io/get-pip.py -o get-pip.py
|
||||
- AppDir/usr/bin/python3.11 get-pip.py
|
||||
# Just to bundle certificates with AppImage
|
||||
- AppDir/usr/bin/python3.11 -s -m pip install certifi
|
||||
- AppDir/usr/bin/python3.11 -s -m pip install -f https://extras.wxpython.org/wxPython4/extras/linux/gtk3/ubuntu-22.04 -r requirements.txt
|
||||
|
||||
runtime:
|
||||
env:
|
||||
PYTHONHOME: '${APPDIR}/usr'
|
||||
PYTHONPATH: '${APPDIR}/usr/lib/python3.11/site-packages'
|
||||
SSL_CERT_FILE: '${APPDIR}/usr/lib/python3.11/site-packages/certifi/cacert.pem'
|
||||
|
||||
AppImage:
|
||||
sign-key: None
|
||||
arch: x86_64
|
||||
file_name: 'pyfa-{{PYFA_VERSION}}-linux.AppImage'
|
||||
@@ -1,19 +0,0 @@
#! /bin/bash -i

# Export APPRUN if running from an extracted image
self="$(readlink -f -- $0)"
here="${self%/*}"
APPDIR="${APPDIR:-${here}}"

# Export TCl/Tk
export TCL_LIBRARY="${APPDIR}/usr/share/tcltk/tcl8.4"
export TK_LIBRARY="${APPDIR}/usr/share/tcltk/tk8.4"
export TKPATH="${TK_LIBRARY}"

# Export SSL certificate
export SSL_CERT_FILE="${APPDIR}/opt/_internal/certs.pem"

# Call the entry point
#! /bin/bash -i

${APPDIR}/usr/bin/pyfa "$@"
@@ -1,3 +0,0 @@
#! /bin/bash

${APPDIR}/usr/bin/python3 -s "${APPDIR}/opt/pyfa/pyfa.py" "$@"
@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
  <id>org.pyfa.pyfa</id>
  <metadata_license>MIT</metadata_license>
  <project_license>GPL-3</project_license>
  <name>Pyfa</name>
  <summary>Python fitting assistant for EVE Online</summary>
  <description>
    <p> Python Fitting Assistant for EVE Online
    </p>
  </description>
  <launchable type="desktop-id">pyfa.desktop</launchable>
  <url type="homepage">https://github.com/pyfa-org/Pyfa</url>
  <provides>
    <binary>pyfa</binary>
  </provides>
</component>
@@ -1,7 +0,0 @@
[Desktop Entry]
Type=Application
Name=Pyfa
Exec=pyfa
Comment=Python Fitting Assistant for EVE Online
Icon=python
Categories=Game;
@@ -1,95 +0,0 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
import requests.certs
|
||||
|
||||
label = subprocess.check_output([
|
||||
"git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('.version', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
('../../imgs/gui/*.png', 'imgs/gui'),
|
||||
('../../imgs/gui/*.gif', 'imgs/gui'),
|
||||
('../../imgs/icons/*.png', 'imgs/icons'),
|
||||
('../../imgs/renders/*.png', 'imgs/renders'),
|
||||
('../../dist_assets/win/pyfa.ico', '.'),
|
||||
('../../service/jargon/*.yaml', 'service/jargon'),
|
||||
('../../locale', 'locale'),
|
||||
(requests.certs.where(), '.'), # is this needed anymore?
|
||||
('../../eve.db', '.'),
|
||||
('../../README.md', '.'),
|
||||
('../../LICENSE', '.'),
|
||||
('../../version.yml', '.'),
|
||||
]
|
||||
|
||||
|
||||
import_these = [
|
||||
'numpy.core._dtype_ctypes', # https://github.com/pyinstaller/pyinstaller/issues/3982
|
||||
'sqlalchemy.ext.baked',  # windows build doesn't launch without it when using sqlalchemy 1.3.x
|
||||
'pkg_resources.py2_warn' # issue 2156
|
||||
]
|
||||
|
||||
icon = os.path.join(os.getcwd(), "dist_assets", "mac", "pyfa.icns")
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis([r'../../pyfa.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=['dist_assets/pyinstaller_hooks'],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
name='pyfa',
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
runtime_tmpdir=None,
|
||||
console=False ,
|
||||
icon=icon,
|
||||
)
|
||||
|
||||
app = BUNDLE(
|
||||
exe,
|
||||
name='pyfa.app',
|
||||
version=os.getenv('PYFA_VERSION'),
|
||||
icon=icon,
|
||||
bundle_identifier=None,
|
||||
info_plist={
|
||||
'NSHighResolutionCapable': 'True',
|
||||
'NSPrincipalClass': 'NSApplication',
|
||||
'CFBundleName': 'pyfa',
|
||||
'CFBundleDisplayName': 'pyfa',
|
||||
'CFBundleIdentifier': 'org.pyfaorg.pyfa',
|
||||
'CFBundleVersion': os.getenv('PYFA_VERSION'),
|
||||
'CFBundleShortVersionString': os.getenv('PYFA_VERSION'),
|
||||
}
|
||||
)
|
||||
@@ -1,37 +0,0 @@
|
||||
# helper script to zip up pyinstaller distribution and create installer file
|
||||
|
||||
import os.path
|
||||
from subprocess import call
|
||||
import zipfile
|
||||
from packaging.version import Version
|
||||
import yaml
|
||||
|
||||
|
||||
with open("version.yml", 'r') as file:
|
||||
data = yaml.load(file, Loader=yaml.SafeLoader)
|
||||
version = data['version']
|
||||
|
||||
os.environ["PYFA_DIST_DIR"] = os.path.join(os.getcwd(), 'dist')
|
||||
|
||||
os.environ["PYFA_VERSION"] = version
|
||||
iscc = "C:\Program Files (x86)\Inno Setup 6\ISCC.exe"
|
||||
|
||||
source = os.path.join(os.environ["PYFA_DIST_DIR"], "pyfa")
|
||||
|
||||
fileName = "pyfa-{}-win".format(os.environ["PYFA_VERSION"])
|
||||
|
||||
print("Compiling EXE")
|
||||
|
||||
v = Version(version)
|
||||
|
||||
print(v)
|
||||
|
||||
call([
|
||||
iscc,
|
||||
os.path.join(os.getcwd(), "dist_assets", "win", "pyfa-setup.iss"),
|
||||
"/dMyAppVersion=%s" % v,
|
||||
"/dMyAppDir=%s" % source,
|
||||
"/dMyOutputDir=%s" % os.path.join(os.getcwd()),
|
||||
"/dMyOutputFile=%s" % fileName]) # stdout=devnull, stderr=devnull
|
||||
|
||||
print("Done")
|
||||
@@ -1,187 +0,0 @@
|
||||
; Script generated by the Inno Setup Script Wizard.
|
||||
; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
|
||||
|
||||
; Versioning
|
||||
; we do some #ifdef conditionals because automated compilation passes these as arguments
|
||||
|
||||
#ifndef MyAppVersion
|
||||
#define MyAppVersion "2.1.0"
|
||||
#endif
|
||||
|
||||
; Other config
|
||||
|
||||
#define MyAppName "pyfa"
|
||||
#define MyAppPublisher "pyfa"
|
||||
#define MyAppURL "https://github.com/pyfa-org/Pyfa/"
|
||||
#define MyAppExeName "pyfa.exe"
|
||||
|
||||
#ifndef MyOutputFile
|
||||
#define MyOutputFile LowerCase(StringChange(MyAppName+'-'+MyAppVersion+'-win', " ", "-"))
|
||||
#endif
|
||||
#ifndef MyAppDir
|
||||
#define MyAppDir "pyfa"
|
||||
#endif
|
||||
#ifndef MyOutputDir
|
||||
#define MyOutputDir "dist"
|
||||
#endif
|
||||
|
||||
[Setup]
|
||||
; NOTE: The value of AppId uniquely identifies this application.
|
||||
; Do not use the same AppId value in installers for other applications.
|
||||
; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
|
||||
AppId={{3DA39096-C08D-49CD-90E0-1D177F32C8AA}
|
||||
AppName={#MyAppName}
|
||||
AppVersion={#MyAppVersion}
|
||||
AppPublisher={#MyAppPublisher}
|
||||
AppPublisherURL={#MyAppURL}
|
||||
AppSupportURL={#MyAppURL}
|
||||
AppUpdatesURL={#MyAppURL}
|
||||
ArchitecturesAllowed=x64
|
||||
ArchitecturesInstallIn64BitMode=x64
|
||||
CloseApplications=yes
|
||||
DefaultDirName={pf}\{#MyAppName}
|
||||
DefaultGroupName={#MyAppName}
|
||||
AllowNoIcons=yes
|
||||
LicenseFile={#MyAppDir}\app\LICENSE
|
||||
OutputDir={#MyOutputDir}
|
||||
OutputBaseFilename={#MyOutputFile}
|
||||
SetupIconFile={#MyAppDir}\app\pyfa.ico
|
||||
SolidCompression=yes
|
||||
|
||||
[Languages]
|
||||
Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||
|
||||
[Tasks]
|
||||
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
|
||||
Name: "quicklaunchicon"; Description: "{cm:CreateQuickLaunchIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked; OnlyBelowVersion: 0,6.1
|
||||
|
||||
[Files]
|
||||
Source: "{#MyAppDir}\pyfa.exe"; DestDir: "{app}"; Flags: ignoreversion; AfterInstall: RemoveFromVirtualStore
|
||||
Source: "{#MyAppDir}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
; NOTE: Don't use "Flags: ignoreversion" on any shared system files
|
||||
|
||||
[Icons]
|
||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"
|
||||
Name: "{group}\{cm:UninstallProgram,{#MyAppName}}"; Filename: "{uninstallexe}"
|
||||
Name: "{commondesktop}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: desktopicon
|
||||
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: quicklaunchicon
|
||||
|
||||
[Run]
|
||||
Filename: "{app}\{#MyAppExeName}"; Description: "{cm:LaunchProgram,{#StringChange(MyAppName, '&', '&&')}}"; Flags: nowait postinstall skipifsilent
|
||||
|
||||
[InstallDelete]
|
||||
; These will delete left over generated files from 1.14 and below
|
||||
Type: filesandordirs; Name: "{app}\eos"
|
||||
Type: filesandordirs; Name: "{app}\gui"
|
||||
Type: filesandordirs; Name: "{app}\service"
|
||||
Type: filesandordirs; Name: "{app}\utils"
|
||||
Type: files; Name: "{app}\*.pyo"
|
||||
Type: files; Name: "{app}\*.pyc"
|
||||
|
||||
[Code]
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
function IsAppRunning(const FileName : string): Boolean;
|
||||
var
|
||||
FSWbemLocator: Variant;
|
||||
FWMIService : Variant;
|
||||
FWbemObjectSet: Variant;
|
||||
begin
|
||||
Result := false;
|
||||
FSWbemLocator := CreateOleObject('WBEMScripting.SWBEMLocator');
|
||||
FWMIService := FSWbemLocator.ConnectServer('', 'root\CIMV2', '', '');
|
||||
FWbemObjectSet := FWMIService.ExecQuery(Format('SELECT Name FROM Win32_Process Where Name="%s"',[FileName]));
|
||||
Result := (FWbemObjectSet.Count > 0);
|
||||
FWbemObjectSet := Unassigned;
|
||||
FWMIService := Unassigned;
|
||||
FSWbemLocator := Unassigned;
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
procedure RemoveFromVirtualStore;
|
||||
var
|
||||
VirtualStore,FileName,FilePath:String;
|
||||
DriveChars:Integer;
|
||||
begin
|
||||
VirtualStore:=AddBackslash(ExpandConstant('{localappdata}'))+'VirtualStore';
|
||||
FileName:=ExpandConstant(CurrentFileName);
|
||||
DriveChars:=Length(ExtractFileDrive(FileName));
|
||||
if DriveChars>0 then begin
|
||||
Delete(FileName,1,DriveChars);
|
||||
FileName:=VirtualStore+FileName;
|
||||
FilePath:=ExtractFilePath(FileName);
|
||||
DelTree(FilePath, True, True, True);
|
||||
end;
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
function PrepareToInstall(var NeedsRestart: Boolean): String;
|
||||
begin
|
||||
if(IsAppRunning( 'pyfa.exe' )) then
|
||||
begin
|
||||
Result := 'Please close pyfa before continuing. When closed, please go back to the previous step and continue. If you have named this installer pyfa.exe, please rename it and restart installation';
|
||||
end
|
||||
else
|
||||
begin
|
||||
Result := '';
|
||||
end
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
function GetUninstallString(): String;
|
||||
var
|
||||
sUnInstPath: String;
|
||||
sUnInstallString: String;
|
||||
begin
|
||||
sUnInstPath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\{{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1'); //Your App GUID/ID
|
||||
sUnInstallString := '';
|
||||
if not RegQueryStringValue(HKLM, sUnInstPath, 'UninstallString', sUnInstallString) then
|
||||
if not RegQueryStringValue(HKCU, sUnInstPath, 'UninstallString', sUnInstallString) then
|
||||
if not RegQueryStringValue(HKLM32, sUnInstPath, 'UninstallString', sUnInstallString) then
|
||||
RegQueryStringValue(HKCU32, sUnInstPath, 'UninstallString', sUnInstallString);
|
||||
Result := sUnInstallString;
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
function UnInstallOldVersion(): Integer;
|
||||
var
|
||||
sUnInstallString: String;
|
||||
iResultCode: Integer;
|
||||
begin
|
||||
// Return Values:
|
||||
// 1 - uninstall string is empty
|
||||
// 2 - error executing the UnInstallString
|
||||
// 3 - successfully executed the UnInstallString
|
||||
|
||||
// default return value
|
||||
Result := 0;
|
||||
|
||||
// get the uninstall string of the old app
|
||||
sUnInstallString := GetUninstallString();
|
||||
if sUnInstallString <> '' then begin
|
||||
sUnInstallString := RemoveQuotes(sUnInstallString);
|
||||
if Exec(sUnInstallString, '/SILENT /NORESTART /SUPPRESSMSGBOXES','', SW_HIDE, ewWaitUntilTerminated, iResultCode) then
|
||||
Result := 3
|
||||
else
|
||||
Result := 2;
|
||||
end else
|
||||
Result := 1;
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
function IsUpgrade(): Boolean;
|
||||
begin
|
||||
Result := (GetUninstallString() <> '');
|
||||
end;
|
||||
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
procedure CurStepChanged(CurStep: TSetupStep);
|
||||
begin
|
||||
if (CurStep=ssInstall) then
|
||||
begin
|
||||
if (IsUpgrade()) then
|
||||
begin
|
||||
UnInstallOldVersion();
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
@@ -1,20 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity name="pyfa" processorArchitecture="x86" type="win32" version="1.0.0.0"/>
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
<compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
|
||||
<application>
|
||||
<supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
|
||||
<supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
|
||||
<supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
|
||||
<supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
|
||||
<supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
|
||||
</application>
|
||||
</compatibility>
|
||||
</assembly>
|
||||
@@ -1,41 +1,35 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
# Note: This script is provided AS-IS for those that may be interested.
|
||||
# pyfa does not currently support pyInstaller (or any other build process) 100% at the moment
|
||||
|
||||
# Command line to build:
|
||||
# (Run from directory where pyfa.py and pyfa.spec lives.)
|
||||
# c:\Python27\scripts\pyinstaller.exe --clean --noconfirm --windowed --upx-dir=.\scripts\upx.exe pyfa.spec
|
||||
|
||||
# Don't forget to change the path to where your pyfa.py and pyfa.spec lives
|
||||
# pathex=['C:\\Users\\Ebag333\\Documents\\GitHub\\Ebag333\\Pyfa'],
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
import requests.certs
|
||||
|
||||
label = subprocess.check_output(["git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('.version', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
('../../imgs/gui/*.png', 'imgs/gui'),
|
||||
('../../imgs/gui/*.gif', 'imgs/gui'),
|
||||
('../../imgs/icons/*.png', 'imgs/icons'),
|
||||
('../../imgs/renders/*.png', 'imgs/renders'),
|
||||
('../../service/jargon/*.yaml', 'service/jargon'),
|
||||
('../../dist_assets/win/pyfa.ico', '.'),
|
||||
('../../dist_assets/win/pyfa.exe.manifest', '.'),
|
||||
(requests.certs.where(), '.'), # is this needed anymore?
|
||||
('../../eve.db', '.'),
|
||||
('../../README.md', '.'),
|
||||
('../../LICENSE', '.'),
|
||||
('../../version.yml', '.'),
|
||||
( 'imgs/gui/*.png', 'imgs/gui' ),
|
||||
( 'imgs/gui/*.gif', 'imgs/gui' ),
|
||||
( 'imgs/icons/*.png', 'imgs/icons' ),
|
||||
( 'imgs/renders/*.png', 'imgs/renders' ),
|
||||
( 'dist_assets/win/pyfa.ico', '.' ),
|
||||
( 'dist_assets/cacert.pem', '.' ),
|
||||
( 'eve.db', '.' ),
|
||||
( 'README.md', '.' ),
|
||||
( 'LICENSE', '.' ),
|
||||
]
|
||||
|
||||
import_these = [
|
||||
'numpy.core._dtype_ctypes', # https://github.com/pyinstaller/pyinstaller/issues/3982
|
||||
'sqlalchemy.ext.baked',  # windows build doesn't launch without it when using sqlalchemy 1.3.x
|
||||
'pkg_resources.py2_warn' # issue 2156
|
||||
]
|
||||
import_these = []
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
# Walk eos.effects and add all effects so we can import them properly
|
||||
for root, folders, files in os.walk("eos/effects"):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
@@ -44,24 +38,25 @@ for root, folders, files in chain.from_iterable(os.walk(path) for path in paths)
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(['../../pyfa.py'],
|
||||
pathex=[
|
||||
# Need this, see https://github.com/pyinstaller/pyinstaller/issues/1566
|
||||
# To get this, download and install windows 10 SDK
|
||||
# If not building on Windows 10, this might be optional
|
||||
r'C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\x86'],
|
||||
a = Analysis(
|
||||
['pyfa.py'],
|
||||
pathex=['C:\\projects\\pyfa\\'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=['dist_assets/pyinstaller_hooks'],
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=['Tkinter'],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
pyz = PYZ(
|
||||
a.pure,
|
||||
a.zipped_data,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
@@ -72,6 +67,7 @@ exe = EXE(pyz,
|
||||
upx=True,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
onefile=False,
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
@@ -81,7 +77,7 @@ coll = COLLECT(
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
onefile=False,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
|
||||
|
||||
dist_assets/win/pyfa_debug.spec (new file)
@@ -0,0 +1,83 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
# Note: This script is provided AS-IS for those that may be interested.
|
||||
# pyfa does not currently support pyInstaller (or any other build process) 100% at the moment
|
||||
|
||||
# Command line to build:
|
||||
# (Run from directory where pyfa.py and pyfa.spec lives.)
|
||||
# c:\Python27\scripts\pyinstaller.exe --clean --noconfirm --windowed --upx-dir=.\scripts\upx.exe pyfa.spec
|
||||
|
||||
# Don't forget to change the path to where your pyfa.py and pyfa.spec lives
|
||||
# pathex=['C:\\Users\\Ebag333\\Documents\\GitHub\\Ebag333\\Pyfa'],
|
||||
|
||||
import os
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
( 'imgs/gui/*.png', 'imgs/gui' ),
|
||||
( 'imgs/gui/*.gif', 'imgs/gui' ),
|
||||
( 'imgs/icons/*.png', 'imgs/icons' ),
|
||||
( 'imgs/renders/*.png', 'imgs/renders' ),
|
||||
( 'dist_assets/win/pyfa.ico', '.' ),
|
||||
( 'dist_assets/cacert.pem', '.' ),
|
||||
( 'eve.db', '.' ),
|
||||
( 'README.md', '.' ),
|
||||
( 'LICENSE', '.' ),
|
||||
]
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk eos.effects and add all effects so we can import them properly
|
||||
for root, folders, files in os.walk("eos/effects"):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(
|
||||
['pyfa.py'],
|
||||
pathex=['C:\\projects\\pyfa\\'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
pyz = PYZ(
|
||||
a.pure,
|
||||
a.zipped_data,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
debug=True,
|
||||
console=True,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
onefile=False,
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
onefile=False,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
@@ -1,45 +0,0 @@
|
||||
# UTF-8
|
||||
#
|
||||
# For more details about fixed file info 'ffi' see:
|
||||
# http://msdn.microsoft.com/en-us/library/ms646997.aspx
|
||||
VSVersionInfo(
|
||||
ffi=FixedFileInfo(
|
||||
# filevers and prodvers should be always a tuple with four items: (1, 2, 3, 4)
|
||||
# Set not needed items to zero 0.
|
||||
filevers=(1, 15, 1, 0),
|
||||
prodvers=(1, 15, 1, 0),
|
||||
# Contains a bitmask that specifies the valid bits of 'flags'.
|
||||
mask=0x3f,
|
||||
# Contains a bitmask that specifies the Boolean attributes of the file.
|
||||
flags=0x0,
|
||||
# The operating system for which this file was designed.
|
||||
# 0x4 - NT and there is no need to change it.
|
||||
OS=0x40004,
|
||||
# The general type of file.
|
||||
# 0x1 - the file is an application.
|
||||
fileType=0x1,
|
||||
# The function of the file.
|
||||
# 0x0 - the function is not defined for this fileType
|
||||
subtype=0x0,
|
||||
# Creation date and time stamp.
|
||||
date=(0, 0)
|
||||
),
|
||||
kids=[
|
||||
StringFileInfo(
|
||||
[
|
||||
StringTable(
|
||||
u'040904E4',
|
||||
[StringStruct(u'LegalCopyright', u''),
|
||||
StringStruct(u'InternalName', u'pyfa.exe'),
|
||||
StringStruct(u'FileVersion', u'1.15.1.0'),
|
||||
StringStruct(u'CompanyName', u''),
|
||||
StringStruct(u'OriginalFilename', u'pyfa.exe'),
|
||||
StringStruct(u'ProductVersion', u'1.15.1.0'),
|
||||
StringStruct(u'FileDescription', u'Python fitting assistant'),
|
||||
StringStruct(u'LegalTrademarks', u''),
|
||||
StringStruct(u'Comments', u''),
|
||||
StringStruct(u'ProductName', u'pyfa')])
|
||||
]),
|
||||
VarFileInfo([VarStruct(u'Translation', [1033, 1252])])
|
||||
]
|
||||
)
|
||||
@@ -1 +0,0 @@
theme: jekyll-theme-midnight
@@ -1,104 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />
|
||||
<title>pyfa Authentication Proxy</title>
|
||||
<style type="text/css">
|
||||
|
||||
|
||||
body { width: 700px; margin: 150px auto; }
|
||||
h1 { font-size: 40px; }
|
||||
h2 { font-size: 32px; }
|
||||
body { font: 20px Helvetica, sans-serif; color: #333; }
|
||||
#article { display: block; text-align: left; width: 650px; margin: 0 auto; }
|
||||
.hidden { display:none; }
|
||||
.success { color: #28a745; }
|
||||
.error { color: #dc3545; }
|
||||
textarea { width: 100%; height: 100px; }
|
||||
hr { border-width: 1px 0 0 0; border-style: solid; border-color: #333; }
|
||||
button { cursor: pointer; background-color: white; border: 1px solid #333; color: #333; padding: 8px 11px; text-align: center; text-decoration: none; display: inline-block; }
|
||||
@media (prefers-color-scheme: dark) {
|
||||
body { background-color: #333; color: white; }
|
||||
textarea { width: 100%; height: 100px; background-color: #aaa;}
|
||||
button { background-color: #333; border: 1px solid white; color: white; }
|
||||
hr { border-color: white; }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<!-- Layout from Short Circuit's CREST login. Shout out! https://github.com/farshield/shortcircuit -->
|
||||
<h1>pyfa</h1>
|
||||
<div id="mode0" class="hidden">
|
||||
<p id="mode0-msg">Processing request...</p>
|
||||
<button id="showBtn" onClick="showCode()">Show Code</button>
|
||||
</div>
|
||||
<div id="mode1" class="hidden">
|
||||
<p id="mode1-p">Please copy and paste the token below into pyfa.</p>
|
||||
<textarea id="authCodeText" readonly></textarea>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<script>
|
||||
|
||||
function showCode() {
|
||||
var element = document.getElementById(`mode1`);
|
||||
element.classList.remove("hidden");
|
||||
element = document.getElementById(`showBtn`);
|
||||
element.classList.add("hidden");
|
||||
}
|
||||
|
||||
function httpPostAsync(url, callback)
|
||||
{
|
||||
var xmlHttp = new XMLHttpRequest();
|
||||
xmlHttp.onreadystatechange = function() {
|
||||
if (xmlHttp.readyState == XMLHttpRequest.DONE)
|
||||
callback(xmlHttp);
|
||||
}
|
||||
xmlHttp.open("GET", url, true); // true for asynchronous
|
||||
xmlHttp.send(null);
|
||||
}
|
||||
|
||||
const urlSearchParams = new URLSearchParams(window.location.search);
|
||||
const params = Object.fromEntries(urlSearchParams.entries());
|
||||
let stateInfo;
|
||||
try {
|
||||
stateInfo = JSON.parse(atob(params.state))
|
||||
} catch (err) {
|
||||
// something has happened and we cannot continue.
|
||||
// todo: show a simple message and exit.
|
||||
throw err;
|
||||
}
|
||||
|
||||
// we always want to show the text box for manual.
|
||||
var element = document.getElementById(`mode${stateInfo.mode}`);
|
||||
element.classList.remove("hidden");
|
||||
|
||||
if (stateInfo.mode === 0) {
|
||||
let p = document.getElementById(`mode1-p`);
|
||||
p.prepend(document.createElement("hr"))
|
||||
}
|
||||
|
||||
document.getElementById(`authCodeText`).value = btoa(JSON.stringify(params))
|
||||
|
||||
if (stateInfo.mode == 0) { // auto / server mode
|
||||
httpPostAsync(stateInfo.redirect + window.location.search, (req)=>{
|
||||
|
||||
const msgDiv = document.getElementById(`mode0-msg`);
|
||||
if (req.status === 200) {
|
||||
msgDiv.innerHTML ="<span class='success'>Success!</span> You may close this window and return to the application.";
|
||||
return;
|
||||
} else if (req.status === 0){
|
||||
msgDiv.innerHTML = "<span class='error'>Error!</span> Server response not received.<p><small>The local pyfa server may have timed out, or may not have started correctly.</small></p>";
|
||||
} else if (req.status === 400){
|
||||
msgDiv.innerHTML = `<span class='error'>Error!</span> <p><small>${req.responseText}</small></p>`
|
||||
} else {
|
||||
msgDiv.innerHTML = `<span class='error'>Error!</span> <p><small>There was an unknown error. Please report this to the pyfa issues page.</p><p><textarea readonly>${req.responseText}</textarea></small></p>`
|
||||
}
|
||||
showCode()
|
||||
// todo: bad request error when it's not an error itself, but rather
|
||||
})
|
||||
// todo: post message to local EVE server
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,37 +0,0 @@
|
||||
## Welcome to GitHub Pages
|
||||
|
||||
You can use the [editor on GitHub](https://github.com/pyfa-org/Pyfa/edit/gh-pages/index.md) to maintain and preview the content for your website in Markdown files.
|
||||
|
||||
Whenever you commit to this repository, GitHub Pages will run [Jekyll](https://jekyllrb.com/) to rebuild the pages in your site, from the content in your Markdown files.
|
||||
|
||||
### Markdown
|
||||
|
||||
Markdown is a lightweight and easy-to-use syntax for styling your writing. It includes conventions for
|
||||
|
||||
```markdown
|
||||
Syntax highlighted code block
|
||||
|
||||
# Header 1
|
||||
## Header 2
|
||||
### Header 3
|
||||
|
||||
- Bulleted
|
||||
- List
|
||||
|
||||
1. Numbered
|
||||
2. List
|
||||
|
||||
**Bold** and _Italic_ and `Code` text
|
||||
|
||||
[Link](url) and 
|
||||
```
|
||||
|
||||
For more details see [GitHub Flavored Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||
|
||||
### Jekyll Themes
|
||||
|
||||
Your Pages site will use the layout and styles from the Jekyll theme you have selected in your [repository settings](https://github.com/pyfa-org/Pyfa/settings/pages). The name of this theme is saved in the Jekyll `_config.yml` configuration file.
|
||||
|
||||
### Support or Contact
|
||||
|
||||
Having trouble with Pages? Check out our [documentation](https://docs.github.com/categories/github-pages-basics/) or [contact support](https://support.github.com/contact) and we’ll help you sort it out.
|
||||
eos/calc.py
@@ -1,71 +0,0 @@
# =============================================================================
# Copyright (C) 2019 Ryan Holmes
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================


import math


# Just copy-paste penalization chain calculation code (with some modifications,
# as multipliers arrive in different form) in here to not make actual attribute
# calculations slower than they already are due to extra function calls
def calculateMultiplier(multipliers):
    """
    multipliers: dictionary in format:
    {stacking group name: [(mult, resist attr ID), (mult, resist attr ID)]}
    """
    val = 1
    for penalizedMultipliers in multipliers.values():
        # A quick explanation of how this works:
        # 1: Bonuses and penalties are calculated separately, so we'll have to filter each of them
        l1 = [v[0] for v in penalizedMultipliers if v[0] > 1]
        l2 = [v[0] for v in penalizedMultipliers if v[0] < 1]
        # 2: The most significant bonuses take the smallest penalty,
        # this means we'll have to sort
        abssort = lambda _val: -abs(_val - 1)
        l1.sort(key=abssort)
        l2.sort(key=abssort)
        # 3: The first module doesn't get penalized at all
        # Any module after the first takes penalties according to:
        # 1 + (multiplier - 1) * math.exp(- math.pow(i, 2) / 7.1289)
        for l in (l1, l2):
            for i in range(len(l)):
                bonus = l[i]
                val *= 1 + (bonus - 1) * math.exp(- i ** 2 / 7.1289)
    return val

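# --- Editor's note: numeric check of the stacking chain above; the multipliers are invented. ---
# Three 10% bonuses in one stacking group: the first applies in full, the second is
# scaled by exp(-1 / 7.1289) ~= 0.869 and the third by exp(-4 / 7.1289) ~= 0.571, so
#     calculateMultiplier({'postMul': [(1.10, None), (1.10, None), (1.10, None)]})
# returns roughly 1.10 * 1.0869 * 1.0571 ~= 1.264 instead of the unpenalized 1.331.
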
def calculateRangeFactor(srcOptimalRange, srcFalloffRange, distance, restrictedRange=True):
|
||||
"""Range strength/chance factor, applicable to guns, ewar, RRs, etc."""
|
||||
if distance is None:
|
||||
return 1
|
||||
if srcFalloffRange > 0:
|
||||
# Most modules cannot be activated when at 3x falloff range, with few exceptions like guns
|
||||
if restrictedRange and distance > srcOptimalRange + 3 * srcFalloffRange:
|
||||
return 0
|
||||
return 0.5 ** ((max(0, distance - srcOptimalRange) / srcFalloffRange) ** 2)
|
||||
elif distance <= srcOptimalRange:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
|
||||
|
||||
def calculateLockTime(srcScanRes, tgtSigRadius):
|
||||
if not srcScanRes or not tgtSigRadius:
|
||||
return None
|
||||
return min(40000 / srcScanRes / math.asinh(tgtSigRadius) ** 2, 30 * 60)
|
||||
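The penalization chain in `calculateMultiplier` above implements EVE's stacking-penalty curve: within a stacking group, the i-th strongest modifier only contributes `1 + (mult - 1) * exp(-i^2 / 7.1289)` of its bonus. Below is a minimal sketch of that behaviour, trimmed from the function above; the multiplier values and the group name are made up purely for illustration.

```python
import math


def calculate_multiplier(multipliers):
    # Same logic as calculateMultiplier above, condensed for the example.
    val = 1
    for penalized in multipliers.values():
        bonuses = sorted((m for m, _ in penalized if m > 1), key=lambda v: -abs(v - 1))
        maluses = sorted((m for m, _ in penalized if m < 1), key=lambda v: -abs(v - 1))
        for group in (bonuses, maluses):
            for i, mult in enumerate(group):
                val *= 1 + (mult - 1) * math.exp(-i ** 2 / 7.1289)
    return val


# Three hypothetical 10% bonuses sharing one stacking group: the first applies
# fully, the second at ~87% strength, the third at ~57%.
print(calculate_multiplier({"damageMultiplier": [(1.1, None), (1.1, None), (1.1, None)]}))
```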
197 eos/capSim.py
@@ -1,7 +1,6 @@
|
||||
import heapq
|
||||
import time
|
||||
from math import sqrt, exp
|
||||
from collections import Counter
|
||||
|
||||
DAY = 24 * 60 * 60 * 1000
|
||||
|
||||
@@ -13,7 +12,7 @@ def lcm(a, b):
|
||||
return n / a
|
||||
|
||||
|
||||
class CapSimulator:
|
||||
class CapSimulator(object):
|
||||
"""Entity's EVE Capacitor Simulator"""
|
||||
|
||||
def __init__(self):
|
||||
@@ -21,7 +20,6 @@ class CapSimulator:
|
||||
|
||||
self.capacitorCapacity = 100
|
||||
self.capacitorRecharge = 1000
|
||||
self.startingCapacity = 1000
|
||||
|
||||
# max simulated time.
|
||||
self.t_max = DAY
|
||||
@@ -42,14 +40,6 @@ class CapSimulator:
|
||||
# relevant decimal digits of capacitor for LCM period optimization
|
||||
self.stability_precision = 1
|
||||
|
||||
# Stores how cap sim changed cap values outside of cap regen time
|
||||
self.saved_changes = ()
|
||||
self.saved_changes_internal = None
|
||||
|
||||
# Reports if sim was stopped due to detecting stability early
|
||||
self.optimize_repeats = True
|
||||
self.result_optimized_repeats = None
|
||||
|
||||
def scale_activation(self, duration, capNeed):
|
||||
for res in self.scale_resolutions:
|
||||
mod = duration % res
|
||||
@@ -68,7 +58,7 @@ class CapSimulator:
|
||||
return duration, capNeed
|
||||
|
||||
def init(self, modules):
|
||||
"""prepare modules. a list of (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector) tuples is
|
||||
"""prepare modules. a list of (duration, capNeed, clipSize, disableStagger) tuples is
|
||||
expected, with clipSize 0 if the module has infinite ammo.
|
||||
"""
|
||||
self.modules = modules
|
||||
@@ -76,57 +66,48 @@ class CapSimulator:
|
||||
def reset(self):
|
||||
"""Reset the simulator state"""
|
||||
self.state = []
|
||||
self.saved_changes_internal = {}
|
||||
self.result_optimized_repeats = False
|
||||
mods = {}
|
||||
period = 1
|
||||
disable_period = False
|
||||
|
||||
# Loop over modules, clearing clipSize if applicable, and group modules based on attributes
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector) in self.modules:
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime) in self.modules:
|
||||
if self.scale:
|
||||
duration, capNeed = self.scale_activation(duration, capNeed)
|
||||
|
||||
# set clipSize to infinite if reloads are disabled unless it's
|
||||
# a cap booster module
|
||||
if not self.reload and not isInjector:
|
||||
# a cap booster module.
|
||||
if not self.reload and capNeed > 0:
|
||||
clipSize = 0
|
||||
reloadTime = 0
|
||||
|
||||
# Group modules based on their properties
|
||||
key = (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector)
|
||||
if key in mods:
|
||||
mods[key] += 1
|
||||
if (duration, capNeed, clipSize, disableStagger, reloadTime) in mods:
|
||||
mods[(duration, capNeed, clipSize, disableStagger, reloadTime)] += 1
|
||||
else:
|
||||
mods[key] = 1
|
||||
mods[(duration, capNeed, clipSize, disableStagger, reloadTime)] = 1
|
||||
|
||||
# Loop over grouped modules, configure staggering and push to the simulation state
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector), amount in mods.items():
|
||||
# period optimization doesn't work when reloads are active.
|
||||
if clipSize:
|
||||
disable_period = True
|
||||
# Just push multiple instances if item is injector. We do not want to stagger them as we will
|
||||
# use them as needed and want them to be available right away
|
||||
if isInjector:
|
||||
for i in range(amount):
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize, reloadTime, isInjector])
|
||||
continue
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime), amount in mods.iteritems():
|
||||
if self.stagger and not disableStagger:
|
||||
# Stagger all mods if they do not need to be reloaded
|
||||
if clipSize == 0:
|
||||
duration = int(duration / amount)
|
||||
# Stagger mods after first
|
||||
else:
|
||||
stagger_amount = (duration * clipSize + reloadTime) / (amount * clipSize)
|
||||
for i in range(1, amount):
|
||||
heapq.heappush(self.state, [i * stagger_amount, duration, capNeed, 0, clipSize, reloadTime, isInjector])
|
||||
# If mods are not staggered - just multiply cap use
|
||||
heapq.heappush(self.state,
|
||||
[i * stagger_amount, duration,
|
||||
capNeed, 0, clipSize, reloadTime])
|
||||
else:
|
||||
capNeed *= amount
|
||||
|
||||
period = lcm(period, duration)
|
||||
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize, reloadTime, isInjector])
|
||||
# period optimization doesn't work when reloads are active.
|
||||
if clipSize:
|
||||
disable_period = True
|
||||
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize, reloadTime])
|
||||
|
||||
if disable_period:
|
||||
self.period = self.t_max
|
||||
@@ -137,8 +118,7 @@ class CapSimulator:
|
||||
"""Run the simulation"""
|
||||
|
||||
start = time.time()
|
||||
awaitingInjectors = []
|
||||
awaitingInjectorsCounterWrap = Counter()
|
||||
|
||||
self.reset()
|
||||
|
||||
push = heapq.heappush
|
||||
@@ -148,36 +128,27 @@ class CapSimulator:
|
||||
stability_precision = self.stability_precision
|
||||
period = self.period
|
||||
|
||||
activation = None
|
||||
iterations = 0
|
||||
|
||||
capCapacity = self.capacitorCapacity
|
||||
tau = self.capacitorRecharge / 5.0
|
||||
|
||||
cap_wrap = self.startingCapacity # cap value at last period
|
||||
cap_lowest = self.startingCapacity # lowest cap value encountered
|
||||
cap_lowest_pre = self.startingCapacity # lowest cap value before activations
|
||||
cap = self.startingCapacity # current cap value
|
||||
cap_wrap = capCapacity # cap value at last period
|
||||
cap_lowest = capCapacity # lowest cap value encountered
|
||||
cap_lowest_pre = capCapacity # lowest cap value before activations
|
||||
cap = capCapacity # current cap value
|
||||
t_wrap = self.period # point in time of next period
|
||||
|
||||
t_last = 0
|
||||
t_max = self.t_max
|
||||
|
||||
while 1:
|
||||
# Nothing to pop - might happen when no mods are activated, or when
|
||||
# only cap injectors are active (and are postponed by code below)
|
||||
try:
|
||||
activation = pop(state)
|
||||
except IndexError:
|
||||
break
|
||||
t_now, duration, capNeed, shot, clipSize, reloadTime, isInjector = activation
|
||||
|
||||
# Max time reached, stop simulation - we're stable
|
||||
activation = pop(state)
|
||||
t_now, duration, capNeed, shot, clipSize, reloadTime = activation
|
||||
if t_now >= t_max:
|
||||
break
|
||||
|
||||
# Regenerate cap from last time point
|
||||
if t_now > t_last:
|
||||
cap = ((1.0 + (sqrt(cap / capCapacity) - 1.0) * exp((t_last - t_now) / tau)) ** 2) * capCapacity
|
||||
cap = ((1.0 + (sqrt(cap / capCapacity) - 1.0) * exp((t_last - t_now) / tau)) ** 2) * capCapacity
|
||||
|
||||
if t_now != t_last:
|
||||
if cap < cap_lowest_pre:
|
||||
@@ -185,104 +156,36 @@ class CapSimulator:
|
||||
if t_now == t_wrap:
|
||||
# history is repeating itself, so if we have more cap now than last
|
||||
# time this happened, it is a stable setup.
|
||||
awaitingInjectorsCounterNow = Counter(awaitingInjectors)
|
||||
if self.optimize_repeats and cap >= cap_wrap and awaitingInjectorsCounterNow == awaitingInjectorsCounterWrap:
|
||||
self.result_optimized_repeats = True
|
||||
if cap >= cap_wrap:
|
||||
break
|
||||
cap_wrap = round(cap, stability_precision)
|
||||
awaitingInjectorsCounterWrap = awaitingInjectorsCounterNow
|
||||
t_wrap += period
|
||||
|
||||
t_last = t_now
|
||||
cap -= capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
|
||||
iterations += 1
|
||||
|
||||
# If injecting cap will "overshoot" max cap, postpone it
|
||||
if isInjector and cap - capNeed > capCapacity:
|
||||
awaitingInjectors.append((duration, capNeed, shot, clipSize, reloadTime, isInjector))
|
||||
t_last = t_now
|
||||
|
||||
else:
|
||||
# If we will need more cap than we have, but we are not at 100% -
|
||||
# use awaiting cap injectors to top us up until we have enough or
|
||||
# until we're full
|
||||
if capNeed > cap and cap < capCapacity:
|
||||
while awaitingInjectors and capNeed > cap and capCapacity > cap:
|
||||
neededInjection = min(capNeed - cap, capCapacity - cap)
|
||||
# Find injectors which have just enough cap or more
|
||||
goodInjectors = [i for i in awaitingInjectors if -i[1] >= neededInjection]
|
||||
if goodInjectors:
|
||||
# Pick injector which overshoots the least
|
||||
bestInjector = min(goodInjectors, key=lambda i: -i[1])
|
||||
else:
|
||||
# Take the one which provides the most cap
|
||||
bestInjector = max(goodInjectors, key=lambda i: -i[1])
|
||||
# Use injector
|
||||
awaitingInjectors.remove(bestInjector)
|
||||
inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector = bestInjector
|
||||
cap -= inj_capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# Add injector to regular state tracker
|
||||
inj_t_now = t_now
|
||||
inj_t_now += inj_duration
|
||||
inj_shot += 1
|
||||
if inj_clipSize:
|
||||
if inj_shot % inj_clipSize == 0:
|
||||
inj_shot = 0
|
||||
inj_t_now += inj_reloadTime
|
||||
push(state, [inj_t_now, inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector])
|
||||
if cap < cap_lowest:
|
||||
if cap < 0.0:
|
||||
break
|
||||
cap_lowest = cap
|
||||
|
||||
# Apply cap modification
|
||||
cap -= capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# queue the next activation of this module
|
||||
t_now += duration
|
||||
shot += 1
|
||||
if clipSize:
|
||||
if shot % clipSize == 0:
|
||||
shot = 0
|
||||
t_now += reloadTime # include reload time
|
||||
activation[0] = t_now
|
||||
activation[3] = shot
|
||||
|
||||
if cap < cap_lowest:
|
||||
# Negative cap - we're unstable, simulation is over
|
||||
if cap < 0.0:
|
||||
break
|
||||
cap_lowest = cap
|
||||
|
||||
# Try using awaiting injectors to top up the cap after spending some
|
||||
while awaitingInjectors and cap < capCapacity:
|
||||
neededInjection = capCapacity - cap
|
||||
# Find injectors which do not overshoot max cap
|
||||
goodInjectors = [i for i in awaitingInjectors if -i[1] <= neededInjection]
|
||||
if not goodInjectors:
|
||||
break
|
||||
# Take the one which provides the most cap
|
||||
bestInjector = max(goodInjectors, key=lambda i: -i[1])
|
||||
# Use injector
|
||||
awaitingInjectors.remove(bestInjector)
|
||||
inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector = bestInjector
|
||||
cap -= inj_capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# Add injector to regular state tracker
|
||||
inj_t_now = t_now
|
||||
inj_t_now += inj_duration
|
||||
inj_shot += 1
|
||||
if inj_clipSize:
|
||||
if inj_shot % inj_clipSize == 0:
|
||||
inj_shot = 0
|
||||
inj_t_now += inj_reloadTime
|
||||
push(state, [inj_t_now, inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector])
|
||||
|
||||
# queue the next activation of this module
|
||||
t_now += duration
|
||||
shot += 1
|
||||
if clipSize:
|
||||
if shot % clipSize == 0:
|
||||
shot = 0
|
||||
t_now += reloadTime # include reload time
|
||||
activation[0] = t_now
|
||||
activation[3] = shot
|
||||
|
||||
push(state, activation)
|
||||
if activation is not None:
|
||||
push(state, activation)
|
||||
push(state, activation)
|
||||
|
||||
# update instance with relevant results.
|
||||
self.t = t_last
|
||||
@@ -290,7 +193,7 @@ class CapSimulator:
|
||||
|
||||
# calculate EVE's stability value
|
||||
try:
|
||||
avgDrain = sum(x[2] / x[1] for x in self.state)
|
||||
avgDrain = reduce(float.__add__, map(lambda x: x[2] / x[1], self.state), 0.0)
|
||||
self.cap_stable_eve = 0.25 * (1.0 + sqrt(-(2.0 * avgDrain * tau - capCapacity) / capCapacity)) ** 2
|
||||
except ValueError:
|
||||
self.cap_stable_eve = 0.0
|
||||
@@ -300,9 +203,7 @@ class CapSimulator:
|
||||
self.cap_stable_low = cap_lowest
|
||||
self.cap_stable_high = cap_lowest_pre
|
||||
else:
|
||||
self.cap_stable_low = self.cap_stable_high = 0.0
|
||||
|
||||
self.saved_changes = tuple((k / 1000, max(0, self.saved_changes_internal[k])) for k in sorted(self.saved_changes_internal))
|
||||
self.saved_changes_internal = None
|
||||
self.cap_stable_low = \
|
||||
self.cap_stable_high = 0.0
|
||||
|
||||
self.runtime = time.time() - start
|
||||
|
||||
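The regeneration step in `CapSimulator.run()` above applies EVE's capacitor recharge equation between activations: with capacity C_max and recharge time T, tau = T / 5 and the cap evolves as C(t) = C_max * (1 + (sqrt(C_0 / C_max) - 1) * exp(-(t - t_0) / tau))^2. Here is a small sketch of that single step in isolation; the ship numbers are invented for illustration, and times are in milliseconds as in the simulator.

```python
from math import exp, sqrt


def regen(cap, dt_ms, cap_capacity, recharge_ms):
    # One passive-regeneration step, as applied between module activations
    # in CapSimulator.run(). All times are in milliseconds.
    tau = recharge_ms / 5.0
    return ((1.0 + (sqrt(cap / cap_capacity) - 1.0) * exp(-dt_ms / tau)) ** 2) * cap_capacity


# Hypothetical ship: 5000 GJ capacitor, 300 s recharge time, starting at 25% cap.
print(regen(1250.0, 60_000, 5000.0, 300_000))  # cap after one minute of passive regen
```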
eos/config.py
@@ -11,41 +11,21 @@ debug = False
gamedataCache = True
saveddataCache = True
gamedata_version = ""
gamedata_date = ""
gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "eve.db"))

lang = ""

# Maps supported languages to their suffix in the database
translation_mapping = {
    "en": "",
    "fr": "_fr",
    # "it": "_it",
    "ja": "_ja",
    "ko": "_ko",
    "ru": "_ru",
    "zh": "_zh",
}


def set_lang(i18n_lang):
    global lang
    lang = translation_mapping.get(i18n_lang, translation_mapping.get("en"))

gamedata_connectionstring = 'sqlite:///' + unicode(realpath(join(dirname(abspath(__file__)), "..", "eve.db")), sys.getfilesystemencoding())
pyfalog.debug("Gamedata connection string: {0}", gamedata_connectionstring)

if istravis is True or hasattr(sys, '_called_from_test'):
    # Running in Travis. Run saveddata database in memory.
    saveddata_connectionstring = 'sqlite:///:memory:'
else:
    saveddata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "saveddata", "saveddata.db"))
    saveddata_connectionstring = 'sqlite:///' + unicode(realpath(join(dirname(abspath(__file__)), "..", "saveddata", "saveddata.db")), sys.getfilesystemencoding())

pyfalog.debug("Saveddata connection string: {0}", saveddata_connectionstring)

settings = {
    "useStaticAdaptiveArmorHardener": False,
    "strictSkillLevels": True,
    "globalDefaultSpoolupPercentage": 1.0
}

# Autodetect path, only change if the autodetection bugs out.
path = dirname(__file__)
path = dirname(unicode(__file__, sys.getfilesystemencoding()))
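The `translation_mapping` / `set_lang` pair above is what the database mappers later use to pick a language-specific column suffix (for example `typeName_fr`). A minimal sketch of the lookup behaviour, assuming only the mapping shown above:

```python
translation_mapping = {"en": "", "fr": "_fr", "ja": "_ja", "ko": "_ko", "ru": "_ru", "zh": "_zh"}
lang = ""


def set_lang(i18n_lang):
    # Unknown locales fall back to the English suffix (empty string).
    global lang
    lang = translation_mapping.get(i18n_lang, translation_mapping.get("en"))


set_lang("fr")
print("typeName{}".format(lang))  # -> typeName_fr
set_lang("de")                    # not in the mapping, falls back to English
print("typeName{}".format(lang))  # -> typeName
```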
115 eos/const.py
@@ -1,115 +0,0 @@
|
||||
# =============================================================================
|
||||
# Copyright (C) 2019 Ryan Holmes
|
||||
#
|
||||
# This file is part of pyfa.
|
||||
#
|
||||
# pyfa is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# pyfa is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
|
||||
# =============================================================================
|
||||
|
||||
from enum import IntEnum,unique
|
||||
|
||||
|
||||
@unique
|
||||
class FittingSlot(IntEnum):
|
||||
"""
|
||||
Contains slots for ship fittings
|
||||
"""
|
||||
# These are self-explanatory
|
||||
LOW = 1
|
||||
MED = 2
|
||||
HIGH = 3
|
||||
RIG = 4
|
||||
SUBSYSTEM = 5
|
||||
# not a real slot, need for pyfa display rack separation
|
||||
MODE = 6
|
||||
# system effects. They are projected "modules" and pyfa assumes all modules
|
||||
# have a slot. In this case, make one up.
|
||||
SYSTEM = 7
|
||||
# used for citadel services
|
||||
SERVICE = 8
|
||||
# fighter 'slots'. Just easier to put them here...
|
||||
F_LIGHT = 10
|
||||
F_SUPPORT = 11
|
||||
F_HEAVY = 12
|
||||
# fighter 'slots' (for structures)
|
||||
FS_LIGHT = 13
|
||||
FS_SUPPORT = 14
|
||||
FS_HEAVY = 15
|
||||
|
||||
|
||||
@unique
|
||||
class ImplantLocation(IntEnum):
|
||||
"""
|
||||
Contains location of the implant
|
||||
"""
|
||||
FIT = 0
|
||||
CHARACTER = 1
|
||||
|
||||
|
||||
@unique
|
||||
class CalcType(IntEnum):
|
||||
"""
|
||||
Contains location of the calculation
|
||||
"""
|
||||
LOCAL = 0
|
||||
PROJECTED = 1
|
||||
COMMAND = 2
|
||||
|
||||
|
||||
@unique
|
||||
class FittingModuleState(IntEnum):
|
||||
"""
|
||||
Contains the state of a fitting module
|
||||
"""
|
||||
OFFLINE = -1
|
||||
ONLINE = 0
|
||||
ACTIVE = 1
|
||||
OVERHEATED = 2
|
||||
|
||||
|
||||
@unique
|
||||
class FittingHardpoint(IntEnum):
|
||||
"""
|
||||
Contains the types of a fitting hardpoint
|
||||
"""
|
||||
NONE = 0
|
||||
MISSILE = 1
|
||||
TURRET = 2
|
||||
|
||||
|
||||
@unique
|
||||
class SpoolType(IntEnum):
|
||||
# Spool and cycle scale are different in case if max spool amount cannot
|
||||
# be divided by spool step without remainder
|
||||
SPOOL_SCALE = 0 # [0..1]
|
||||
CYCLE_SCALE = 1 # [0..1]
|
||||
TIME = 2 # Expressed via time in seconds since spool up started
|
||||
CYCLES = 3 # Expressed in amount of cycles since spool up started
|
||||
|
||||
|
||||
@unique
|
||||
class FitSystemSecurity(IntEnum):
|
||||
HISEC = 0
|
||||
LOWSEC = 1
|
||||
NULLSEC = 2
|
||||
WSPACE = 3
|
||||
|
||||
|
||||
@unique
|
||||
class Operator(IntEnum):
|
||||
PREASSIGN = 0
|
||||
PREINCREASE = 1
|
||||
MULTIPLY = 2
|
||||
POSTINCREASE = 3
|
||||
FORCE = 4
|
||||
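The `eos/const.py` enums above are plain `IntEnum`s, so a stored integer converts straight back into a member and members compare equal to their numeric values. A minimal illustration using a few of the slot values defined above:

```python
from enum import IntEnum, unique


@unique
class FittingSlot(IntEnum):
    LOW = 1
    MED = 2
    HIGH = 3
    RIG = 4
    SUBSYSTEM = 5


print(FittingSlot(3))         # FittingSlot.HIGH - a raw integer maps back to a member
print(FittingSlot.HIGH == 3)  # True - IntEnum members compare equal to their int value
```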
@@ -17,84 +17,45 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
import re
|
||||
import threading
|
||||
|
||||
from sqlalchemy import MetaData, create_engine, event
|
||||
from sqlalchemy.orm import sessionmaker, scoped_session
|
||||
from sqlalchemy import MetaData, create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from . import migration
|
||||
import migration
|
||||
from eos import config
|
||||
from logbook import Logger
|
||||
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
pyfalog.info("Initializing database")
|
||||
pyfalog.info("Gamedata connection: {0}", config.gamedata_connectionstring)
|
||||
pyfalog.info("Saveddata connection: {0}", config.saveddata_connectionstring)
|
||||
|
||||
|
||||
class ReadOnlyException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def re_fn(expr, item):
|
||||
try:
|
||||
reg = re.compile(expr, re.IGNORECASE)
|
||||
except (SystemExit, KeyboardInterrupt):
|
||||
raise
|
||||
except:
|
||||
return False
|
||||
return reg.search(item) is not None
|
||||
|
||||
|
||||
pyfalog.debug('Initializing gamedata')
|
||||
gamedata_connectionstring = config.gamedata_connectionstring
|
||||
if callable(gamedata_connectionstring):
|
||||
gamedata_engine = create_engine("sqlite://", creator=gamedata_connectionstring, echo=config.debug)
|
||||
else:
|
||||
gamedata_engine = create_engine(gamedata_connectionstring, echo=config.debug)
|
||||
|
||||
|
||||
@event.listens_for(gamedata_engine, 'connect')
|
||||
def create_functions(dbapi_connection, connection_record):
|
||||
dbapi_connection.create_function('regexp', 2, re_fn)
|
||||
|
||||
|
||||
gamedata_meta = MetaData()
|
||||
gamedata_meta.bind = gamedata_engine
|
||||
GamedataSession = scoped_session(sessionmaker(bind=gamedata_engine, autoflush=False, expire_on_commit=False))
|
||||
gamedata_session = GamedataSession()
|
||||
gamedata_session = sessionmaker(bind=gamedata_engine, autoflush=False, expire_on_commit=False)()
|
||||
|
||||
gamedata_sessions = {threading.get_ident(): gamedata_session}
|
||||
|
||||
|
||||
def get_gamedata_session():
|
||||
thread_id = threading.get_ident()
|
||||
if thread_id not in gamedata_sessions:
|
||||
gamedata_sessions[thread_id] = GamedataSession()
|
||||
return gamedata_sessions[thread_id]
|
||||
|
||||
|
||||
pyfalog.debug('Getting gamedata version')
|
||||
# This should be moved elsewhere, maybe as an actual query. Current, without try-except, it breaks when making a new
|
||||
# game db because we haven't reached gamedata_meta.create_all()
|
||||
try:
|
||||
config.gamedata_version = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
|
||||
).fetchone()[0]
|
||||
config.gamedata_date = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'dump_time'"
|
||||
).fetchone()[0]
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except Exception as e:
|
||||
pyfalog.warning("Missing gamedata version.")
|
||||
pyfalog.critical(e)
|
||||
config.gamedata_version = None
|
||||
config.gamedata_date = None
|
||||
|
||||
pyfalog.debug('Initializing saveddata')
|
||||
saveddata_connectionstring = config.saveddata_connectionstring
|
||||
if saveddata_connectionstring is not None:
|
||||
if callable(saveddata_connectionstring):
|
||||
@@ -111,19 +72,16 @@ else:
|
||||
# Lock controlling any changes introduced to session
|
||||
sd_lock = threading.RLock()
|
||||
|
||||
pyfalog.debug('Importing gamedata DB scheme')
|
||||
# Import all the definitions for all our database stuff
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata import alphaClones, attribute, category, effect, group, item, marketGroup, metaData, metaGroup, queries, traits, unit, dynamicAttributes, implantSet
|
||||
pyfalog.debug('Importing saveddata DB scheme')
|
||||
from eos.db.gamedata import alphaClones, attribute, category, effect, group, icon, item, marketGroup, metaData, metaGroup, queries, traits, unit
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata import booster, cargo, character, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, \
|
||||
miscData, mutatorMod, mutatorDrone, module, override, price, queries, skill, targetProfile, user
|
||||
from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, \
|
||||
miscData, module, override, price, queries, skill, targetResists, user
|
||||
|
||||
pyfalog.debug('Importing gamedata queries')
|
||||
# Import queries
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata.queries import *
|
||||
pyfalog.debug('Importing saveddata queries')
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata.queries import *
|
||||
|
||||
|
||||
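One side of this hunk keeps a single module-level gamedata session, while the other hands out one session per thread via `get_gamedata_session()` and a dict keyed by `threading.get_ident()`. Below is a stripped-down sketch of that per-thread pattern; the in-memory SQLite engine is just a stand-in for the real `eve.db` engine.

```python
import threading

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite:///:memory:")  # stand-in for the real eve.db engine
GamedataSession = scoped_session(sessionmaker(bind=engine, autoflush=False, expire_on_commit=False))

_sessions = {}


def get_gamedata_session():
    # Each thread gets (and keeps) its own session object, mirroring the
    # gamedata_sessions dict keyed by threading.get_ident() above.
    thread_id = threading.get_ident()
    if thread_id not in _sessions:
        _sessions[thread_id] = GamedataSession()
    return _sessions[thread_id]


print(get_gamedata_session() is get_gamedata_session())  # True within one thread
```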
@@ -1,2 +1,2 @@
|
||||
__all__ = ["attribute", "category", "effect", "group", "metaData", "dynamicAttributes",
|
||||
"item", "marketGroup", "metaGroup", "unit", "alphaClones", "implantSet"]
|
||||
__all__ = ["attribute", "category", "effect", "group", "metaData",
|
||||
"icon", "item", "marketGroup", "metaGroup", "unit", "alphaClones"]
|
||||
|
||||
@@ -22,8 +22,8 @@ from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Attribute, AttributeInfo, Unit
|
||||
import eos.config
|
||||
from eos.gamedata import Attribute, AttributeInfo, Unit, Icon
|
||||
|
||||
typeattributes_table = Table("dgmtypeattribs", gamedata_meta,
|
||||
Column("value", Float),
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
@@ -36,11 +36,9 @@ attributes_table = Table("dgmattribs", gamedata_meta,
|
||||
Column("maxAttributeID", Integer, ForeignKey("dgmattribs.attributeID")),
|
||||
Column("description", Unicode),
|
||||
Column("published", Boolean),
|
||||
*[Column("displayName{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
Column("displayName", String),
|
||||
Column("highIsGood", Boolean),
|
||||
Column("iconID", Integer),
|
||||
Column("attributeCategory", Integer),
|
||||
# Column("tooltipDescription", Integer), # deprecated...?
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")),
|
||||
Column("unitID", Integer, ForeignKey("dgmunits.unitID")))
|
||||
|
||||
mapper(Attribute, typeattributes_table,
|
||||
@@ -48,17 +46,18 @@ mapper(Attribute, typeattributes_table,
|
||||
|
||||
mapper(AttributeInfo, attributes_table,
|
||||
properties={
|
||||
"icon" : relation(Icon),
|
||||
"unit" : relation(Unit),
|
||||
"ID" : synonym("attributeID"),
|
||||
"name" : synonym("attributeName"),
|
||||
"description": deferred(attributes_table.c.description),
|
||||
"description": deferred(attributes_table.c.description)
|
||||
})
|
||||
|
||||
Attribute.ID = association_proxy("info", "attributeID")
|
||||
Attribute.name = association_proxy("info", "attributeName")
|
||||
Attribute.description = association_proxy("info", "description")
|
||||
Attribute.published = association_proxy("info", "published")
|
||||
Attribute.displayName = association_proxy("info", "displayName{}".format(eos.config.lang))
|
||||
Attribute.displayName = association_proxy("info", "displayName")
|
||||
Attribute.highIsGood = association_proxy("info", "highIsGood")
|
||||
Attribute.iconID = association_proxy("info", "iconID")
|
||||
Attribute.icon = association_proxy("info", "icon")
|
||||
|
||||
@@ -17,23 +17,23 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Boolean, Column, Integer, String, Table
|
||||
from sqlalchemy.orm import deferred, mapper, synonym
|
||||
from sqlalchemy import Column, String, Integer, ForeignKey, Boolean, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Category
|
||||
import eos.config
|
||||
from eos.gamedata import Category, Icon
|
||||
|
||||
categories_table = Table("invcategories", gamedata_meta,
|
||||
Column("categoryID", Integer, primary_key=True),
|
||||
*[Column("name{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
# Column("description", String), # deprecated
|
||||
Column("categoryName", String),
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("iconID", Integer))
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Category, categories_table,
|
||||
properties={
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("categoryID"),
|
||||
"displayName": synonym("name{}".format(eos.config.lang)),
|
||||
# "description": deferred(categories_table.c.description) # deprecated
|
||||
"name" : synonym("categoryName"),
|
||||
"description": deferred(categories_table.c.description)
|
||||
})
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Float, Integer, Table, ForeignKey
|
||||
from sqlalchemy.orm import mapper, relation, synonym
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import DynamicItem, DynamicItemAttribute, DynamicItemItem, Item
|
||||
|
||||
from eos.gamedata import AttributeInfo
|
||||
|
||||
dynamic_table = Table("mutaplasmids", gamedata_meta,
|
||||
Column("typeID", ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
Column("resultingTypeID", ForeignKey("invtypes.typeID"), primary_key=True))
|
||||
|
||||
dynamicAttributes_table = Table("mutaplasmidAttributes", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("mutaplasmids.typeID"), primary_key=True),
|
||||
Column("attributeID", ForeignKey("dgmattribs.attributeID"), primary_key=True),
|
||||
Column("min", Float),
|
||||
Column("max", Float))
|
||||
|
||||
dynamicApplicable_table = Table("mutaplasmidItems", gamedata_meta,
|
||||
Column("typeID", ForeignKey("mutaplasmids.typeID"), primary_key=True),
|
||||
Column("applicableTypeID", ForeignKey("invtypes.typeID"), primary_key=True),)
|
||||
|
||||
mapper(DynamicItem, dynamic_table, properties={
|
||||
"attributes": relation(DynamicItemAttribute),
|
||||
"item": relation(Item, foreign_keys=[dynamic_table.c.typeID]),
|
||||
"resultingItem": relation(Item, foreign_keys=[dynamic_table.c.resultingTypeID]),
|
||||
"ID": synonym("typeID"),
|
||||
})
|
||||
|
||||
mapper(DynamicItemAttribute, dynamicAttributes_table,
|
||||
properties={"info": relation(AttributeInfo, lazy=False)})
|
||||
|
||||
mapper(DynamicItemItem, dynamicApplicable_table, properties={
|
||||
"mutaplasmid": relation(DynamicItem, viewonly=True),
|
||||
})
|
||||
|
||||
DynamicItemAttribute.ID = association_proxy("info", "attributeID")
|
||||
DynamicItemAttribute.name = association_proxy("info", "attributeName")
|
||||
DynamicItemAttribute.description = association_proxy("info", "description")
|
||||
DynamicItemAttribute.published = association_proxy("info", "published")
|
||||
DynamicItemAttribute.displayName = association_proxy("info", "displayName")
|
||||
DynamicItemAttribute.highIsGood = association_proxy("info", "highIsGood")
|
||||
DynamicItemAttribute.iconID = association_proxy("info", "iconID")
|
||||
DynamicItemAttribute.icon = association_proxy("info", "icon")
|
||||
DynamicItemAttribute.unit = association_proxy("info", "unit")
|
||||
@@ -18,24 +18,24 @@
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred, backref
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Category, Group
|
||||
import eos.config
|
||||
from eos.gamedata import Category, Group, Icon
|
||||
|
||||
groups_table = Table("invgroups", gamedata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
*[Column("name{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
# Column("description", String), # deprecated
|
||||
Column("groupName", String),
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("categoryID", Integer, ForeignKey("invcategories.categoryID")),
|
||||
Column("iconID", Integer))
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Group, groups_table,
|
||||
properties={
|
||||
"category" : relation(Category, backref=backref("groups", cascade="all,delete")),
|
||||
"category" : relation(Category, backref="groups"),
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("groupID"),
|
||||
"displayName" : synonym("name{}".format(eos.config.lang)),
|
||||
# "description": deferred(groups_table.c.description) # deprecated
|
||||
"name" : synonym("groupName"),
|
||||
"description": deferred(groups_table.c.description)
|
||||
})
|
||||
|
||||
35 eos/db/gamedata/icon.py (Normal file)
@@ -0,0 +1,35 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Table
|
||||
from sqlalchemy.orm import mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Icon
|
||||
|
||||
icons_table = Table("icons", gamedata_meta,
|
||||
Column("iconID", Integer, primary_key=True),
|
||||
Column("description", String),
|
||||
Column("iconFile", String))
|
||||
|
||||
mapper(Icon, icons_table,
|
||||
properties={
|
||||
"ID" : synonym("iconID"),
|
||||
"description": deferred(icons_table.c.description)
|
||||
})
|
||||
@@ -1,33 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Table
|
||||
from sqlalchemy.orm import mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import ImplantSet
|
||||
|
||||
implant_set_table = Table("implantsets", gamedata_meta,
|
||||
Column("setID", Integer, primary_key=True),
|
||||
Column("setName", String),
|
||||
Column("gradeName", String),
|
||||
Column("implants", String))
|
||||
|
||||
mapper(ImplantSet, implant_set_table,
|
||||
properties={"ID": synonym("setID")})
|
||||
@@ -17,67 +17,47 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, Table
|
||||
from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table, Float
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import backref, deferred, mapper, relation, synonym
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from eos.db.gamedata.effect import typeeffects_table
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.db.gamedata.dynamicAttributes import dynamicApplicable_table
|
||||
from eos.db.gamedata.effect import typeeffects_table
|
||||
from eos.gamedata import Attribute, DynamicItem, Effect, Group, Item, Traits, MetaGroup
|
||||
|
||||
import eos.config
|
||||
from eos.gamedata import Attribute, Effect, Group, Icon, Item, MetaType, Traits
|
||||
|
||||
items_table = Table("invtypes", gamedata_meta,
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
*[Column("typeName{}".format(lang), String, index=True) for lang in eos.config.translation_mapping.values()],
|
||||
*[Column("typeDescription{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
Column("typeName", String, index=True),
|
||||
Column("description", String),
|
||||
Column("raceID", Integer),
|
||||
Column("factionID", Integer),
|
||||
Column("volume", Float),
|
||||
Column("mass", Float),
|
||||
Column("capacity", Float),
|
||||
Column("published", Boolean),
|
||||
Column("marketGroupID", Integer, ForeignKey("invmarketgroups.marketGroupID")),
|
||||
Column("iconID", Integer),
|
||||
Column("graphicID", Integer),
|
||||
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True),
|
||||
Column("metaLevel", Integer),
|
||||
Column("metaGroupID", Integer, ForeignKey("invmetagroups.metaGroupID"), index=True),
|
||||
Column("variationParentTypeID", Integer, ForeignKey("invtypes.typeID"), index=True),
|
||||
Column("replacements", String),
|
||||
Column("reqskills", String),
|
||||
Column("requiredfor", String),
|
||||
)
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")),
|
||||
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True))
|
||||
|
||||
from .metaGroup import metatypes_table # noqa
|
||||
from .traits import traits_table # noqa
|
||||
|
||||
props = {
|
||||
"group": relation(Group, backref=backref("items", cascade="all,delete")),
|
||||
mapper(Item, items_table,
|
||||
properties={
|
||||
"group" : relation(Group, backref="items"),
|
||||
"icon" : relation(Icon),
|
||||
"_Item__attributes": relation(Attribute, cascade='all, delete, delete-orphan', collection_class=attribute_mapped_collection('name')),
|
||||
"effects": relation(Effect, secondary=typeeffects_table, collection_class=attribute_mapped_collection('name')),
|
||||
"metaGroup": relation(MetaGroup, backref=backref("items", cascade="all,delete")),
|
||||
"varParent": relation(Item, backref=backref("varChildren", cascade="all,delete"), remote_side=items_table.c.typeID),
|
||||
"ID": synonym("typeID"),
|
||||
"name": synonym("typeName{}".format(eos.config.lang)),
|
||||
"description" : synonym("_description{}".format(eos.config.lang)),
|
||||
"traits": relation(
|
||||
Traits,
|
||||
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False
|
||||
),
|
||||
"mutaplasmids": relation(
|
||||
DynamicItem,
|
||||
primaryjoin=dynamicApplicable_table.c.applicableTypeID == items_table.c.typeID,
|
||||
secondaryjoin=dynamicApplicable_table.c.typeID == DynamicItem.typeID,
|
||||
secondary=dynamicApplicable_table,
|
||||
backref="applicableItems",
|
||||
viewonly=True
|
||||
)
|
||||
}
|
||||
|
||||
# Create deferred columns shadowing all the description fields. The literal `description` property will dynamically
|
||||
# be assigned as synonym to one of these
|
||||
props.update({'_description' + v: deferred(items_table.c['typeDescription' + v]) for (k, v) in eos.config.translation_mapping.items()})
|
||||
|
||||
mapper(Item, items_table, properties=props)
|
||||
"metaGroup" : relation(MetaType,
|
||||
primaryjoin=metatypes_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False),
|
||||
"ID" : synonym("typeID"),
|
||||
"name" : synonym("typeName"),
|
||||
"description" : deferred(items_table.c.description),
|
||||
"traits" : relation(Traits,
|
||||
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False)
|
||||
})
|
||||
|
||||
Item.category = association_proxy("group", "category")
|
||||
|
||||
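The localized mapping above creates one shadow column per language suffix and then points the generic `description` (and `name`) property at whichever column matches `eos.config.lang`. A plain-Python sketch of how those property names are generated, with the SQLAlchemy specifics omitted:

```python
translation_mapping = {"en": "", "fr": "_fr", "ja": "_ja", "ko": "_ko", "ru": "_ru", "zh": "_zh"}
lang = "_fr"  # the value eos.config.lang would hold after set_lang("fr")

# One shadow column name per language, mirroring the props.update(...) comprehension above:
shadow_columns = {"_description" + suffix: "typeDescription" + suffix
                  for suffix in translation_mapping.values()}
print(sorted(shadow_columns))         # ['_description', '_description_fr', '_description_ja', ...]

# The generic properties are then synonyms for the active language's columns:
print("_description{}".format(lang))  # -> _description_fr
print("typeName{}".format(lang))      # -> typeName_fr (same trick backs Item.name)
```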
@@ -21,35 +21,24 @@ from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Item, MarketGroup
|
||||
import eos.config
|
||||
from eos.gamedata import Icon, Item, MarketGroup
|
||||
|
||||
marketgroups_table = Table("invmarketgroups", gamedata_meta,
|
||||
Column("marketGroupID", Integer, primary_key=True),
|
||||
*[Column("marketGroupName{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
*[Column("marketGroupDescription{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
Column("marketGroupName", String),
|
||||
Column("description", String),
|
||||
Column("hasTypes", Boolean),
|
||||
Column("parentGroupID", Integer,
|
||||
ForeignKey("invmarketgroups.marketGroupID", initially="DEFERRED", deferrable=True)),
|
||||
Column("iconID", Integer))
|
||||
|
||||
props = {
|
||||
"items": relation(Item, backref="marketGroup"),
|
||||
"parent": relation(MarketGroup, backref="children", remote_side=[marketgroups_table.c.marketGroupID]),
|
||||
"ID": synonym("marketGroupID"),
|
||||
"name": synonym("marketGroupName{}".format(eos.config.lang)),
|
||||
"description": synonym("_description{}".format(eos.config.lang)),
|
||||
}
|
||||
|
||||
# Create deferred columns shadowing all the description fields. The literal `description` property will dynamically
|
||||
# be assigned as synonym to one of these
|
||||
# this is mostly here to allow the db_update to be language-agnostic
|
||||
# todo: determine if we ever use market group descriptions... can we just get with of these?
|
||||
props.update({'_description' + v: deferred(marketgroups_table.c['marketGroupDescription' + v]) for (k, v) in eos.config.translation_mapping.items()})
|
||||
|
||||
mapper(
|
||||
MarketGroup,
|
||||
marketgroups_table,
|
||||
properties=props
|
||||
)
|
||||
ForeignKey("invmarketgroups.marketGroupID", initially="DEFERRED", deferrable=True)),
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(MarketGroup, marketgroups_table,
|
||||
properties={
|
||||
"items" : relation(Item, backref="marketGroup"),
|
||||
"parent" : relation(MarketGroup, backref="children",
|
||||
remote_side=[marketgroups_table.c.marketGroupID]),
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("marketGroupID"),
|
||||
"name" : synonym("marketGroupName"),
|
||||
"description": deferred(marketgroups_table.c.description)
|
||||
})
|
||||
|
||||
@@ -17,25 +17,35 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, String
|
||||
from sqlalchemy.orm import mapper, synonym
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import MetaGroup
|
||||
import eos.config
|
||||
from eos.db.gamedata.item import items_table
|
||||
from eos.gamedata import Item, MetaGroup, MetaType
|
||||
|
||||
metagroups_table = Table(
|
||||
"invmetagroups",
|
||||
gamedata_meta,
|
||||
Column("metaGroupID", Integer, primary_key=True),
|
||||
*[Column("metaGroupName{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
|
||||
)
|
||||
metagroups_table = Table("invmetagroups", gamedata_meta,
|
||||
Column("metaGroupID", Integer, primary_key=True),
|
||||
Column("metaGroupName", String))
|
||||
|
||||
mapper(
|
||||
MetaGroup,
|
||||
metagroups_table,
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"name": synonym("metaGroupName{}".format(eos.config.lang))
|
||||
}
|
||||
)
|
||||
metatypes_table = Table("invmetatypes", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
|
||||
Column("parentTypeID", Integer, ForeignKey("invtypes.typeID")),
|
||||
Column("metaGroupID", Integer, ForeignKey("invmetagroups.metaGroupID")))
|
||||
|
||||
mapper(MetaGroup, metagroups_table,
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"name": synonym("metaGroupName")
|
||||
})
|
||||
|
||||
mapper(MetaType, metatypes_table,
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"parent": relation(Item, primaryjoin=metatypes_table.c.parentTypeID == items_table.c.typeID),
|
||||
"items" : relation(Item, primaryjoin=metatypes_table.c.typeID == items_table.c.typeID),
|
||||
"info" : relation(MetaGroup, lazy=False)
|
||||
})
|
||||
|
||||
MetaType.name = association_proxy("info", "name")
|
||||
|
||||
@@ -17,16 +17,15 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy.inspection import inspect
|
||||
from sqlalchemy.orm import aliased, exc, join
|
||||
from sqlalchemy.orm import join, exc, aliased
|
||||
from sqlalchemy.sql import and_, or_, select
|
||||
|
||||
import eos.config
|
||||
from eos.db import get_gamedata_session
|
||||
from eos.db.gamedata.item import items_table
|
||||
from eos.db import gamedata_session
|
||||
from eos.db.gamedata.metaGroup import metatypes_table, items_table
|
||||
from eos.db.gamedata.group import groups_table
|
||||
from eos.db.util import processEager, processWhere
|
||||
from eos.gamedata import AlphaClone, Attribute, AttributeInfo, Category, DynamicItem, Group, Item, MarketGroup, MetaData, MetaGroup, ImplantSet
|
||||
from eos.gamedata import AlphaClone, Attribute, Category, Group, Item, MarketGroup, MetaGroup, AttributeInfo, MetaData
|
||||
|
||||
cache = {}
|
||||
configVal = getattr(eos.config, "gamedataCache", None)
|
||||
@@ -64,7 +63,7 @@ else:
|
||||
return deco
|
||||
|
||||
|
||||
def sqlizeNormalString(line):
|
||||
def sqlizeString(line):
|
||||
# Escape backslashes first, as they will be as escape symbol in queries
|
||||
# Then escape percent and underscore signs
|
||||
# Finally, replace generic wildcards with sql-style wildcards
|
||||
@@ -79,65 +78,24 @@ itemNameMap = {}
|
||||
def getItem(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
item = get_gamedata_session().query(Item).get(lookfor)
|
||||
item = gamedata_session.query(Item).get(lookfor)
|
||||
else:
|
||||
item = get_gamedata_session().query(Item).options(*processEager(eager)).filter(Item.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
item = gamedata_session.query(Item).options(*processEager(eager)).filter(Item.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
if lookfor in itemNameMap:
|
||||
id = itemNameMap[lookfor]
|
||||
if eager is None:
|
||||
item = get_gamedata_session().query(Item).get(id)
|
||||
item = gamedata_session.query(Item).get(id)
|
||||
else:
|
||||
item = get_gamedata_session().query(Item).options(*processEager(eager)).filter(Item.ID == id).first()
|
||||
item = gamedata_session.query(Item).options(*processEager(eager)).filter(Item.ID == id).first()
|
||||
else:
|
||||
# Item names are unique, so we can use first() instead of one()
|
||||
item = get_gamedata_session().query(Item).options(*processEager(eager)).filter(Item.typeName == lookfor).first()
|
||||
if item is not None:
|
||||
itemNameMap[lookfor] = item.ID
|
||||
item = gamedata_session.query(Item).options(*processEager(eager)).filter(Item.name == lookfor).first()
|
||||
itemNameMap[lookfor] = item.ID
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
return item
|
||||
|
||||
@cachedQuery(1, "itemIDs")
|
||||
def getItems(itemIDs, eager=None):
|
||||
if not isinstance(itemIDs, (tuple, list, set)) or not all(isinstance(t, int) for t in itemIDs):
|
||||
raise TypeError("Need iterable of integers as argument")
|
||||
if eager is None:
|
||||
items = get_gamedata_session().query(Item).filter(Item.ID.in_(itemIDs)).all()
|
||||
else:
|
||||
items = get_gamedata_session().query(Item).options(*processEager(eager)).filter(Item.ID.in_(itemIDs)).all()
|
||||
return items
|
||||
|
||||
|
||||
def getMutaplasmid(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
item = get_gamedata_session().query(DynamicItem).filter(DynamicItem.ID == lookfor).first()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
return item
|
||||
|
||||
|
||||
def getItemWithBaseItemAttribute(lookfor, baseItemID, eager=None):
|
||||
# A lot of this is described in more detail in #1597
|
||||
item = get_gamedata_session().query(Item).get(lookfor)
|
||||
base = getItem(baseItemID)
|
||||
|
||||
# we have to load all attributes for this object, otherwise we'll lose access to them when we expunge.
|
||||
# todo: figure out a way to eagerly load all these via the query...
|
||||
for x in [*inspect(Item).relationships.keys(), 'description']:
|
||||
getattr(item, x)
|
||||
|
||||
# Copy over the attributes from the base, but ise the items attributes when there's an overlap
|
||||
# WARNING: the attribute object still has the old typeID. I don't believe we access this typeID anywhere in the code,
|
||||
# but should keep this in mind for now.
|
||||
item._Item__attributes = {**base.attributes, **item.attributes}
|
||||
|
||||
# Expunge the item form the session. This is required to have different Abyssal / Base combinations loaded in memory.
|
||||
# Without expunging it, once one Abyssal Web is created, SQLAlchmey will use it for all others. We don't want this,
|
||||
# we want to generate a completely new object to work with
|
||||
get_gamedata_session().expunge(item)
|
||||
return item
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getItems(lookfor, eager=None):
|
||||
@@ -158,7 +116,7 @@ def getItems(lookfor, eager=None):
|
||||
|
||||
if len(toGet) > 0:
|
||||
# Get items that aren't currently cached, and store them in the cache
|
||||
items = get_gamedata_session().query(Item).filter(Item.ID.in_(toGet)).all()
|
||||
items = gamedata_session.query(Item).filter(Item.ID.in_(toGet)).all()
|
||||
for item in items:
|
||||
cache[(item.ID, None)] = item
|
||||
results += items
|
||||
@@ -172,9 +130,9 @@ def getItems(lookfor, eager=None):
|
||||
def getAlphaClone(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
item = get_gamedata_session().query(AlphaClone).get(lookfor)
|
||||
item = gamedata_session.query(AlphaClone).get(lookfor)
|
||||
else:
|
||||
item = get_gamedata_session().query(AlphaClone).options(*processEager(eager)).filter(AlphaClone.ID == lookfor).first()
|
||||
item = gamedata_session.query(AlphaClone).options(*processEager(eager)).filter(AlphaClone.ID == lookfor).first()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
return item
|
||||
@@ -182,7 +140,7 @@ def getAlphaClone(lookfor, eager=None):
|
||||
|
||||
def getAlphaCloneList(eager=None):
|
||||
eager = processEager(eager)
|
||||
clones = get_gamedata_session().query(AlphaClone).options(*eager).all()
|
||||
clones = gamedata_session.query(AlphaClone).options(*eager).all()
|
||||
return clones
|
||||
|
||||
|
||||
@@ -193,21 +151,20 @@ groupNameMap = {}
|
||||
def getGroup(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
group = get_gamedata_session().query(Group).get(lookfor)
|
||||
group = gamedata_session.query(Group).get(lookfor)
|
||||
else:
|
||||
group = get_gamedata_session().query(Group).options(*processEager(eager)).filter(Group.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
group = gamedata_session.query(Group).options(*processEager(eager)).filter(Group.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
if lookfor in groupNameMap:
|
||||
id = groupNameMap[lookfor]
|
||||
if eager is None:
|
||||
group = get_gamedata_session().query(Group).get(id)
|
||||
group = gamedata_session.query(Group).get(id)
|
||||
else:
|
||||
group = get_gamedata_session().query(Group).options(*processEager(eager)).filter(Group.ID == id).first()
|
||||
group = gamedata_session.query(Group).options(*processEager(eager)).filter(Group.ID == id).first()
|
||||
else:
|
||||
# Group names are unique, so we can use first() instead of one()
|
||||
group = get_gamedata_session().query(Group).options(*processEager(eager)).filter(Group.name == lookfor).first()
|
||||
if group is not None:
|
||||
                groupNameMap[lookfor] = group.ID
            group = gamedata_session.query(Group).options(*processEager(eager)).filter(Group.name == lookfor).first()
            groupNameMap[lookfor] = group.ID
    else:
        raise TypeError("Need integer or string as argument")
    return group
@@ -220,24 +177,23 @@ categoryNameMap = {}
def getCategory(lookfor, eager=None):
    if isinstance(lookfor, int):
        if eager is None:
            category = get_gamedata_session().query(Category).get(lookfor)
            category = gamedata_session.query(Category).get(lookfor)
        else:
            category = get_gamedata_session().query(Category).options(*processEager(eager)).filter(
            category = gamedata_session.query(Category).options(*processEager(eager)).filter(
                Category.ID == lookfor).first()
    elif isinstance(lookfor, str):
    elif isinstance(lookfor, basestring):
        if lookfor in categoryNameMap:
            id = categoryNameMap[lookfor]
            if eager is None:
                category = get_gamedata_session().query(Category).get(id)
                category = gamedata_session.query(Category).get(id)
            else:
                category = get_gamedata_session().query(Category).options(*processEager(eager)).filter(
                category = gamedata_session.query(Category).options(*processEager(eager)).filter(
                    Category.ID == id).first()
        else:
            # Category names are unique, so we can use first() instead of one()
            category = get_gamedata_session().query(Category).options(*processEager(eager)).filter(
            category = gamedata_session.query(Category).options(*processEager(eager)).filter(
                Category.name == lookfor).first()
            if category is not None:
                categoryNameMap[lookfor] = category.ID
            categoryNameMap[lookfor] = category.ID
    else:
        raise TypeError("Need integer or string as argument")
    return category
@@ -250,72 +206,58 @@ metaGroupNameMap = {}
def getMetaGroup(lookfor, eager=None):
    if isinstance(lookfor, int):
        if eager is None:
            metaGroup = get_gamedata_session().query(MetaGroup).get(lookfor)
            metaGroup = gamedata_session.query(MetaGroup).get(lookfor)
        else:
            metaGroup = get_gamedata_session().query(MetaGroup).options(*processEager(eager)).filter(
            metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
                MetaGroup.ID == lookfor).first()
    elif isinstance(lookfor, str):
    elif isinstance(lookfor, basestring):
        if lookfor in metaGroupNameMap:
            id = metaGroupNameMap[lookfor]
            if eager is None:
                metaGroup = get_gamedata_session().query(MetaGroup).get(id)
                metaGroup = gamedata_session.query(MetaGroup).get(id)
            else:
                metaGroup = get_gamedata_session().query(MetaGroup).options(*processEager(eager)).filter(
                metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
                    MetaGroup.ID == id).first()
        else:
            # MetaGroup names are unique, so we can use first() instead of one()
            metaGroup = get_gamedata_session().query(MetaGroup).options(*processEager(eager)).filter(
                MetaGroup.metaGroupName == lookfor).first()
            if metaGroup is not None:
                metaGroupNameMap[lookfor] = metaGroup.ID
            metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
                MetaGroup.name == lookfor).first()
            metaGroupNameMap[lookfor] = metaGroup.ID
    else:
        raise TypeError("Need integer or string as argument")
    return metaGroup

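The name-keyed caches above (groupNameMap, categoryNameMap, metaGroupNameMap) all follow the same memoization pattern: resolve a name to its ID once via the database, then serve later lookups through the primary-key cache. A minimal standalone sketch of that pattern, with illustrative names rather than pyfa's actual API:

# Sketch only: generic name -> ID memoization in front of a keyed lookup.
_name_to_id = {}

def lookup_by_name(session, model, name):
    """Resolve `name` to a row, caching the name -> primary key mapping."""
    if name in _name_to_id:
        # Primary-key get() can be served from the identity map / query cache.
        return session.query(model).get(_name_to_id[name])
    row = session.query(model).filter(model.name == name).first()
    if row is not None:
        _name_to_id[name] = row.ID
    return row
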
def getMetaGroups():
    return get_gamedata_session().query(MetaGroup).all()


@cachedQuery(1, "lookfor")
def getMarketGroup(lookfor, eager=None):
    if isinstance(lookfor, int):
        if eager is None:
            marketGroup = get_gamedata_session().query(MarketGroup).get(lookfor)
            marketGroup = gamedata_session.query(MarketGroup).get(lookfor)
        else:
            marketGroup = get_gamedata_session().query(MarketGroup).options(*processEager(eager)).filter(
            marketGroup = gamedata_session.query(MarketGroup).options(*processEager(eager)).filter(
                MarketGroup.ID == lookfor).first()
    else:
        raise TypeError("Need integer as argument")
    return marketGroup


def getMarketTreeNodeIds(rootNodeIds):
    allIds = set()
    addedIds = set(rootNodeIds)
    while addedIds:
        allIds.update(addedIds)
        addedIds = {mg.ID for mg in get_gamedata_session().query(MarketGroup).filter(MarketGroup.parentGroupID.in_(addedIds))}
    return allIds

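getMarketTreeNodeIds expands the market tree one level per query: each pass collects the children of the IDs added in the previous pass until no new nodes appear. The same traversal over a plain parent-to-children mapping, as a sketch (the dict-based tree is purely illustrative):

def expand_tree(children_by_parent, root_ids):
    """Level-by-level expansion: root IDs plus all descendant IDs."""
    all_ids = set()
    added = set(root_ids)
    while added:
        all_ids.update(added)
        # Children of everything added in the previous round.
        added = {child
                 for parent in added
                 for child in children_by_parent.get(parent, ())}
    return all_ids

# Example: 1 -> {2, 3}, 2 -> {4}
print(expand_tree({1: {2, 3}, 2: {4}}, [1]))  # {1, 2, 3, 4}
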
@cachedQuery(2, "where", "filter")
def getItemsByCategory(filter, where=None, eager=None):
    if isinstance(filter, int):
        filter = Category.ID == filter
    elif isinstance(filter, str):
    elif isinstance(filter, basestring):
        filter = Category.name == filter
    else:
        raise TypeError("Need integer or string as argument")

    filter = processWhere(filter, where)
    return get_gamedata_session().query(Item).options(*processEager(eager)).join(Item.group, Group.category).filter(
    return gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category).filter(
        filter).all()


@cachedQuery(3, "where", "nameLike", "join")
def searchItems(nameLike, where=None, join=None, eager=None):
    if not isinstance(nameLike, str):
    if not isinstance(nameLike, basestring):
        raise TypeError("Need string as argument")

    if join is None:
@@ -324,9 +266,9 @@ def searchItems(nameLike, where=None, join=None, eager=None):
    if not hasattr(join, "__iter__"):
        join = (join,)

    items = get_gamedata_session().query(Item).options(*processEager(eager)).join(*join)
    items = gamedata_session.query(Item).options(*processEager(eager)).join(*join)
    for token in nameLike.split(' '):
        token_safe = "%{0}%".format(sqlizeNormalString(token))
        token_safe = u"%{0}%".format(sqlizeString(token))
        if where is not None:
            items = items.filter(and_(Item.name.like(token_safe, escape="\\"), where))
        else:
@@ -335,35 +277,14 @@ def searchItems(nameLike, where=None, join=None, eager=None):
    return items

@cachedQuery(3, "tokens", "where", "join")
|
||||
def searchItemsRegex(tokens, where=None, join=None, eager=None):
|
||||
if not isinstance(tokens, (tuple, list)) or not all(isinstance(t, str) for t in tokens):
|
||||
raise TypeError("Need tuple or list of strings as argument")
|
||||
|
||||
if join is None:
|
||||
join = tuple()
|
||||
|
||||
if not hasattr(join, "__iter__"):
|
||||
join = (join,)
|
||||
|
||||
items = get_gamedata_session().query(Item).options(*processEager(eager)).join(*join)
|
||||
for token in tokens:
|
||||
if where is not None:
|
||||
items = items.filter(and_(Item.name.op('regexp')(token), where))
|
||||
else:
|
||||
items = items.filter(Item.name.op('regexp')(token))
|
||||
items = items.limit(100).all()
|
||||
return items
|
||||
|
||||
|
||||
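The `Item.name.op('regexp')(token)` filter in searchItemsRegex emits a `name REGEXP ?` clause, which SQLite only honours if the connection supplies a REGEXP implementation. A minimal sketch of how such a function can be registered on a raw sqlite3 connection (this is an assumption about the wiring, not pyfa's actual setup):

import re
import sqlite3

def _regexp(pattern, value):
    # SQLite rewrites "X REGEXP Y" as regexp(Y, X), i.e. (pattern, value).
    return value is not None and re.search(pattern, value) is not None

conn = sqlite3.connect(":memory:")
conn.create_function("regexp", 2, _regexp)

conn.execute("CREATE TABLE items (name TEXT)")
conn.execute("INSERT INTO items VALUES ('Small Shield Booster')")
rows = conn.execute("SELECT name FROM items WHERE name REGEXP ?", ("Shield",)).fetchall()
print(rows)  # [('Small Shield Booster',)]
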
@cachedQuery(3, "where", "nameLike", "join")
|
||||
def searchSkills(nameLike, where=None, eager=None):
|
||||
if not isinstance(nameLike, str):
|
||||
if not isinstance(nameLike, basestring):
|
||||
raise TypeError("Need string as argument")
|
||||
|
||||
items = get_gamedata_session().query(Item).options(*processEager(eager)).join(Item.group, Group.category)
|
||||
items = gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category)
|
||||
for token in nameLike.split(' '):
|
||||
token_safe = "%{0}%".format(sqlizeNormalString(token))
|
||||
token_safe = u"%{0}%".format(sqlizeString(token))
|
||||
if where is not None:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), Category.ID == 16, where))
|
||||
else:
|
||||
@@ -381,9 +302,11 @@ def getVariations(itemids, groupIDs=None, where=None, eager=None):
    if len(itemids) == 0:
        return []

    itemfilter = or_(*(items_table.c.variationParentTypeID == itemid for itemid in itemids))
    itemfilter = or_(*(metatypes_table.c.parentTypeID == itemid for itemid in itemids))
    filter = processWhere(itemfilter, where)
    vars = get_gamedata_session().query(Item).options(*processEager(eager)).filter(filter).all()
    joinon = items_table.c.typeID == metatypes_table.c.typeID
    vars = gamedata_session.query(Item).options(*processEager(eager)).join((metatypes_table, joinon)).filter(
        filter).all()

    if vars:
        return vars
@@ -391,7 +314,7 @@ def getVariations(itemids, groupIDs=None, where=None, eager=None):
    itemfilter = or_(*(groups_table.c.groupID == groupID for groupID in groupIDs))
    filter = processWhere(itemfilter, where)
    joinon = items_table.c.groupID == groups_table.c.groupID
    vars = get_gamedata_session().query(Item).options(*processEager(eager)).join((groups_table, joinon)).filter(
    vars = gamedata_session.query(Item).options(*processEager(eager)).join((groups_table, joinon)).filter(
        filter).all()

    return vars
@@ -399,14 +322,14 @@ def getVariations(itemids, groupIDs=None, where=None, eager=None):

@cachedQuery(1, "attr")
|
||||
def getAttributeInfo(attr, eager=None):
|
||||
if isinstance(attr, str):
|
||||
if isinstance(attr, basestring):
|
||||
filter = AttributeInfo.name == attr
|
||||
elif isinstance(attr, int):
|
||||
filter = AttributeInfo.ID == attr
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
try:
|
||||
result = get_gamedata_session().query(AttributeInfo).options(*processEager(eager)).filter(filter).one()
|
||||
result = gamedata_session.query(AttributeInfo).options(*processEager(eager)).filter(filter).one()
|
||||
except exc.NoResultFound:
|
||||
result = None
|
||||
return result
|
||||
@@ -414,8 +337,8 @@ def getAttributeInfo(attr, eager=None):
|
||||
|
||||
@cachedQuery(1, "field")
|
||||
def getMetaData(field):
|
||||
if isinstance(field, str):
|
||||
data = get_gamedata_session().query(MetaData).get(field)
|
||||
if isinstance(field, basestring):
|
||||
data = gamedata_session.query(MetaData).get(field)
|
||||
else:
|
||||
raise TypeError("Need string as argument")
|
||||
return data
|
||||
@@ -434,30 +357,27 @@ def directAttributeRequest(itemIDs, attrIDs):
               and_(Attribute.attributeID.in_(attrIDs), Item.typeID.in_(itemIDs)),
               from_obj=[join(Attribute, Item)])

    result = get_gamedata_session().execute(q).fetchall()
    result = gamedata_session.execute(q).fetchall()
    return result


def getAbyssalTypes():
    return set([r.resultingTypeID for r in get_gamedata_session().query(DynamicItem.resultingTypeID).distinct()])
def getRequiredFor(itemID, attrMapping):
    Attribute1 = aliased(Attribute)
    Attribute2 = aliased(Attribute)

    skillToLevelClauses = []

    for attrSkill, attrLevel in attrMapping.iteritems():
        skillToLevelClauses.append(and_(Attribute1.attributeID == attrSkill, Attribute2.attributeID == attrLevel))

    queryOr = or_(*skillToLevelClauses)

    q = select((Attribute2.typeID, Attribute2.value),
               and_(Attribute1.value == itemID, queryOr),
               from_obj=[
                   join(Attribute1, Attribute2, Attribute1.typeID == Attribute2.typeID)
               ])

    result = gamedata_session.execute(q).fetchall()

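getRequiredFor expresses, as a self-join on the attribute table, the pairing of "required skill" attributes with their matching "required skill level" attributes. The same computation in plain Python over already-fetched rows, as a sketch (row shape and names are assumptions for illustration, not the eos API):

def required_skills(rows, item_id, attr_mapping):
    """rows: iterable of (typeID, attributeID, value) gamedata attribute rows.

    attr_mapping maps a "required skill" attribute ID to its paired
    "required skill level" attribute ID. Returns {typeID: required level}
    for every type that requires the skill `item_id`.
    """
    by_type = {}
    for type_id, attr_id, value in rows:
        by_type.setdefault(type_id, {})[attr_id] = value
    result = {}
    for type_id, attrs in by_type.items():
        for skill_attr, level_attr in attr_mapping.items():
            if attrs.get(skill_attr) == item_id and level_attr in attrs:
                result[type_id] = attrs[level_attr]
    return result
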
@cachedQuery(1, "itemID")
|
||||
def getDynamicItem(itemID, eager=None):
|
||||
try:
|
||||
if isinstance(itemID, int):
|
||||
if eager is None:
|
||||
result = get_gamedata_session().query(DynamicItem).filter(DynamicItem.ID == itemID).one()
|
||||
else:
|
||||
result = get_gamedata_session().query(DynamicItem).options(*processEager(eager)).filter(DynamicItem.ID == itemID).one()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
except exc.NoResultFound:
|
||||
result = None
|
||||
return result
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getAllImplantSets():
|
||||
implantSets = get_gamedata_session().query(ImplantSet).all()
|
||||
return implantSets
|
||||
|
||||
@@ -1,21 +1,11 @@
from sqlalchemy import Column, Table, Integer, String, ForeignKey
from sqlalchemy.orm import mapper, synonym
from sqlalchemy.orm import mapper

from eos.db import gamedata_meta
from eos.gamedata import Traits
import eos.config

traits_table = Table(
    "invtraits",
    gamedata_meta,
    Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
    *[Column("traitText{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
)
traits_table = Table("invtraits", gamedata_meta,
                     Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
                     Column("traitText", String))

mapper(
    Traits,
    traits_table,
    properties={
        "display": synonym("traitText{}".format(eos.config.lang)),
    }
)
mapper(Traits, traits_table)

@@ -22,13 +22,11 @@ from sqlalchemy.orm import mapper, synonym

from eos.db import gamedata_meta
from eos.gamedata import Unit
import eos.config

groups_table = Table("dgmunits", gamedata_meta,
                     Column("unitID", Integer, primary_key=True),
                     Column("unitName", String),
                     *[Column("displayName{}".format(lang), String) for lang in eos.config.translation_mapping.values()],
                     )
                     Column("displayName", String))

mapper(Unit, groups_table,
       properties={

@@ -3,7 +3,7 @@ import shutil
import time

import config
from . import migrations
import migrations

pyfalog = Logger(__name__)

@@ -34,7 +34,7 @@ def update(saveddata_engine):

    shutil.copyfile(config.saveDB, toFile)

    for version in range(dbVersion, appVersion):
    for version in xrange(dbVersion, appVersion):
        func = migrations.updates[version + 1]
        if func:
            pyfalog.info("Applying database update: {0}", version + 1)

@@ -8,25 +8,23 @@ many upgrade files as there are database versions (version 5 would include
upgrade files 1-5)
"""

import pkgutil
import re

from eos.utils.pyinst_support import iterNamespace

updates = {}
appVersion = 0

prefix = __name__ + "."

for modName in iterNamespace(__name__, __path__):
for importer, modname, ispkg in pkgutil.iter_modules(__path__, prefix):
    # loop through python files, extracting update number and function, and
    # adding it to a list
    modname_tail = modName.rsplit('.', 1)[-1]
    modname_tail = modname.rsplit('.', 1)[-1]
    module = __import__(modname, fromlist=True)
    m = re.match("^upgrade(?P<index>\d+)$", modname_tail)
    if not m:
        continue
    index = int(m.group("index"))
    appVersion = max(appVersion, index)
    module = __import__(modName, fromlist=True)
    upgrade = getattr(module, "upgrade", False)
    if upgrade:
        updates[index] = upgrade

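The module scan above builds the migration registry by convention: any module named upgradeN that exposes an upgrade callable is recorded under key N, and the highest N becomes the schema version the application expects. A stripped-down sketch of the same registry pattern (module discovery is faked with a plain list so the example stays self-contained):

import re

def build_registry(module_names, get_upgrade):
    """Map upgrade index -> upgrade callable and derive the target schema version."""
    updates = {}
    app_version = 0
    for name in module_names:
        m = re.match(r"^upgrade(?P<index>\d+)$", name)
        if not m:
            continue
        index = int(m.group("index"))
        app_version = max(app_version, index)
        upgrade = get_upgrade(name)  # e.g. getattr(module, "upgrade", None)
        if upgrade:
            updates[index] = upgrade
    return updates, app_version

# Example with stand-in callables:
mods = ["upgrade1", "upgrade2", "readme"]
updates, app_version = build_registry(mods, lambda name: (lambda engine: None))
print(sorted(updates), app_version)  # [1, 2] 2
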
@@ -91,7 +91,7 @@ def upgrade(saveddata_engine):
        saveddata_engine.execute("ALTER TABLE fits ADD COLUMN targetResistsID INTEGER;")

    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

@@ -108,7 +108,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

@@ -332,7 +332,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

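Every tiericide migration in this range repeats the same loop: a CONVERSIONS dict maps the surviving item ID to the retired IDs it replaces, and each retired ID is rewritten in place. A small helper capturing that shared pattern (hypothetical; the real migrations keep the loop inline):

def apply_conversions(saveddata_engine, conversions, tables=("modules", "cargo")):
    """Rewrite retired item IDs to their replacement in the given saveddata tables."""
    for replacement_item, retired_items in conversions.items():
        for retired_item in retired_items:
            for table in tables:
                # Table names come from the fixed tuple above, never from user input.
                saveddata_engine.execute(
                    'UPDATE "{}" SET "itemID" = ? WHERE "itemID" = ?'.format(table),
                    (replacement_item, retired_item))

# Example shape of a conversions mapping (IDs are illustrative):
# apply_conversions(engine, {6441: (6443,), 6437: (6439,)})
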
@@ -33,13 +33,9 @@ def upgrade(saveddata_engine):
                try:
                    saveddata_session.execute(commandFits_table.insert(),
                                              {"boosterID": value, "boostedID": boosted, "active": 1})
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    pass
        saveddata_session.commit()
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        # Shouldn't fail unless you have updated database without the old fleet schema and manually modify the database version
        # If it does, simply fail. Fleet data migration isn't critically important here

@@ -60,7 +60,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

@@ -14,7 +14,7 @@ def upgrade(saveddata_engine):
        "boosters": 2,
        "cargo": 2,
        "characters": 2,
        # "crest": 1,
        "crest": 1,
        "damagePatterns": 2,
        "drones": 2,
        "fighters": 2,
@@ -29,7 +29,7 @@ def upgrade(saveddata_engine):
        "targetResists": 2
    }

    for table in list(tables.keys()):
    for table in tables.keys():

        # midnight brain, there's probably a much more simple way to do this, but fuck it
        if tables[table] > 0:

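Most of the churn in these saveddata hunks is the Python 3 to Python 2 translation of dict iteration: items()/keys()/range() on the newer side become iteritems()/keys()/xrange() on the 1.37 side. Wrapping keys() in list(), as the newer code does above, also matters on Python 3 whenever the loop body might modify the dict, since keys() is a live view there. A tiny illustration with hypothetical data:

tables = {"boosters": 2, "cargo": 2, "crest": 1}

# Python 3: keys() is a view; materialise it before mutating the dict.
for table in list(tables.keys()):
    if tables[table] < 2:
        del tables[table]  # safe because we iterate over a snapshot

print(tables)  # {'boosters': 2, 'cargo': 2}
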
@@ -4204,7 +4204,7 @@ conversion2 = {

def upgrade(saveddata_engine):
    # First we want to get a list of fittings that are completely fitted out with subsystems
    oldItems = [str(x) for x in conversion2.keys()]
    oldItems = [str(x) for x in conversion2.iterkeys()]

    # I can't figure out a way to get IN operator to work when supplying a list using a parameterized query. So I'm
    # doing it the shitty way by formatting the SQL string. Don't do this kids!
@@ -4235,13 +4235,11 @@ def upgrade(saveddata_engine):

            # And last but not least, delete the last subsystem
            saveddata_engine.execute("DELETE FROM modules WHERE ID = ?", (oldModules[4][0],))
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # if something fails, fuck it, we tried. It'll default to the generic conversion below
            continue

    for oldItem, newItem in conversion2.items():
    for oldItem, newItem in conversion2.iteritems():
        saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                 (newItem, oldItem))
        saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',

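The comment above notes that the author fell back to formatting the ID list straight into the SQL string because a parameterized IN clause would not cooperate. With the sqlite3 DB-API the usual workaround is to generate one placeholder per value, which keeps the query parameterized; a sketch (table and column names are illustrative):

import sqlite3

def select_modules_by_item_ids(conn, item_ids):
    """Parameterized IN (...) by generating one '?' placeholder per ID."""
    placeholders = ", ".join("?" for _ in item_ids)
    sql = 'SELECT "ID", "itemID" FROM "modules" WHERE "itemID" IN ({})'.format(placeholders)
    return conn.execute(sql, tuple(item_ids)).fetchall()

conn = sqlite3.connect(":memory:")
conn.execute('CREATE TABLE "modules" ("ID" INTEGER PRIMARY KEY, "itemID" INTEGER)')
conn.executemany('INSERT INTO "modules" ("itemID") VALUES (?)', [(1,), (2,), (3,)])
print(select_modules_by_item_ids(conn, [1, 3]))  # [(1, 1), (3, 3)]
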
@@ -1,18 +0,0 @@
"""
Migration 28

- adds baseItemID and mutaplasmidID to modules table
"""
import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT baseItemID FROM modules LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE modules ADD COLUMN baseItemID INT;")

    try:
        saveddata_engine.execute("SELECT mutaplasmidID FROM modules LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE modules ADD COLUMN mutaplasmidID INT;")
@@ -1,18 +0,0 @@
"""
Migration 29

- adds spoolType and spoolAmount to modules table
"""
import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT spoolType FROM modules LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE modules ADD COLUMN spoolType INT;")

    try:
        saveddata_engine.execute("SELECT spoolAmount FROM modules LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE modules ADD COLUMN spoolAmount FLOAT;")
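
Migrations 28 and 29 (and most of the later ones) use the same idempotent trick: probe the column with a SELECT and only ALTER TABLE when that fails, so re-running a migration is harmless. The repeated try/except could be factored into a helper along these lines (hypothetical; the migrations keep it inline):

import sqlalchemy

def add_column_if_missing(saveddata_engine, table, column, ddl_type):
    """ALTER TABLE ... ADD COLUMN, but only if the column is not there yet."""
    try:
        saveddata_engine.execute("SELECT {} FROM {} LIMIT 1".format(column, table))
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute(
            "ALTER TABLE {} ADD COLUMN {} {};".format(table, column, ddl_type))

# Equivalent to the body of Migration 28:
# add_column_if_missing(engine, "modules", "baseItemID", "INT")
# add_column_if_missing(engine, "modules", "mutaplasmidID", "INT")
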
@@ -1,17 +0,0 @@
"""
Migration 30

- changes to prices table
"""


import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT status FROM prices LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        # Just drop table, table will be re-created by sqlalchemy and
        # data will be re-fetched
        saveddata_engine.execute("DROP TABLE prices;")
@@ -1,15 +0,0 @@
"""
Migration 31

- added fit system security column
"""


import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT systemSecurity FROM fits LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE fits ADD COLUMN systemSecurity INT")
@@ -1,16 +0,0 @@
"""
Migration 32

- added speed, sig and radius columns to targetResists table
"""


import sqlalchemy


def upgrade(saveddata_engine):
    for column in ('maxVelocity', 'signatureRadius', 'radius'):
        try:
            saveddata_engine.execute("SELECT {} FROM targetResists LIMIT 1;".format(column))
        except sqlalchemy.exc.DatabaseError:
            saveddata_engine.execute("ALTER TABLE targetResists ADD COLUMN {} FLOAT;".format(column))
@@ -1,30 +0,0 @@
"""
Migration 33

Allow use of floats in damage pattern values
"""

tmpTable = """
CREATE TABLE "damagePatternsTemp" (
    "ID" INTEGER NOT NULL,
    "name" VARCHAR,
    "emAmount" FLOAT,
    "thermalAmount" FLOAT,
    "kineticAmount" FLOAT,
    "explosiveAmount" FLOAT,
    "ownerID" INTEGER,
    "created" DATETIME,
    "modified" DATETIME,
    PRIMARY KEY ("ID"),
    FOREIGN KEY("ownerID") REFERENCES users ("ID")
)
"""


def upgrade(saveddata_engine):
    saveddata_engine.execute(tmpTable)
    saveddata_engine.execute(
        'INSERT INTO damagePatternsTemp (ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID, created, modified) '
        'SELECT ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID, created, modified FROM damagePatterns')
    saveddata_engine.execute('DROP TABLE damagePatterns')
    saveddata_engine.execute('ALTER TABLE damagePatternsTemp RENAME TO damagePatterns')
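
Migration 33 changes the damage columns from INTEGER to FLOAT by rebuilding the table, because SQLite's ALTER TABLE cannot change an existing column's type: create a temporary table with the new schema, copy the rows, drop the old table, rename. The sequence in miniature (generic names, for illustration only):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute('CREATE TABLE patterns ("ID" INTEGER PRIMARY KEY, "emAmount" INTEGER)')
conn.execute('INSERT INTO patterns ("emAmount") VALUES (25)')

# 1. New table with the desired column type.
conn.execute('CREATE TABLE patternsTemp ("ID" INTEGER PRIMARY KEY, "emAmount" FLOAT)')
# 2. Copy the data across.
conn.execute('INSERT INTO patternsTemp ("ID", "emAmount") SELECT "ID", "emAmount" FROM patterns')
# 3. Swap the tables.
conn.execute('DROP TABLE patterns')
conn.execute('ALTER TABLE patternsTemp RENAME TO patterns')

print(conn.execute('SELECT * FROM patterns').fetchall())  # [(1, 25.0)]
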
@@ -1,25 +0,0 @@
"""
Migration 34

- Adds projection range columns to projectable entities
"""
import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT projectionRange FROM projectedFits LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE projectedFits ADD COLUMN projectionRange FLOAT;")
    try:
        saveddata_engine.execute("SELECT projectionRange FROM modules LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE modules ADD COLUMN projectionRange FLOAT;")
    try:
        saveddata_engine.execute("SELECT projectionRange FROM drones LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE drones ADD COLUMN projectionRange FLOAT;")
    try:
        saveddata_engine.execute("SELECT projectionRange FROM fighters LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE fighters ADD COLUMN projectionRange FLOAT;")
@@ -1,166 +0,0 @@
|
||||
"""
|
||||
Migration 35
|
||||
|
||||
- Remove builtin damage patterns and target profiles from the database
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
dmgPatterns = (
|
||||
'Uniform',
|
||||
'[Bombs]Concussion Bomb',
|
||||
'[Bombs]Electron Bomb',
|
||||
'[Bombs]Scorch Bomb',
|
||||
'[Bombs]Shrapnel Bomb',
|
||||
'[Exotic Plasma]Baryon',
|
||||
'[Exotic Plasma]Meson',
|
||||
'[Exotic Plasma]Tetryon',
|
||||
'[Exotic Plasma][T2] Mystic',
|
||||
'[Exotic Plasma][T2] Occult',
|
||||
'[Frequency Crystals]Gamma',
|
||||
'[Frequency Crystals]Infrared',
|
||||
'[Frequency Crystals]Microwave',
|
||||
'[Frequency Crystals]Multifrequency',
|
||||
'[Frequency Crystals]Radio',
|
||||
'[Frequency Crystals]Standard',
|
||||
'[Frequency Crystals]Ultraviolet',
|
||||
'[Frequency Crystals]Xray',
|
||||
'[Frequency Crystals][T2] Aurora',
|
||||
'[Frequency Crystals][T2] Conflagration',
|
||||
'[Frequency Crystals][T2] Gleam',
|
||||
'[Frequency Crystals][T2] Scorch',
|
||||
'[Generic]EM',
|
||||
'[Generic]Explosive',
|
||||
'[Generic]Kinetic',
|
||||
'[Generic]Thermal',
|
||||
'[Hybrid Charges]Antimatter',
|
||||
'[Hybrid Charges]Iridium',
|
||||
'[Hybrid Charges]Iron',
|
||||
'[Hybrid Charges]Lead',
|
||||
'[Hybrid Charges]Plutonium',
|
||||
'[Hybrid Charges]Thorium',
|
||||
'[Hybrid Charges]Tungsten',
|
||||
'[Hybrid Charges]Uranium',
|
||||
'[Hybrid Charges][T2] Javelin',
|
||||
'[Hybrid Charges][T2] Null',
|
||||
'[Hybrid Charges][T2] Spike',
|
||||
'[Hybrid Charges][T2] Void',
|
||||
'[Missiles]Inferno',
|
||||
'[Missiles]Mjolnir',
|
||||
'[Missiles]Nova',
|
||||
'[Missiles]Scourge',
|
||||
'[Missiles][Structure) Standup Missile',
|
||||
'[Missiles][Structure] Standup Missile',
|
||||
'[NPC][Asteroid] Angel Cartel',
|
||||
'[NPC][Asteroid] Blood Raiders',
|
||||
'[NPC][Asteroid] Guristas',
|
||||
'[NPC][Asteroid] Rogue Drone',
|
||||
'[NPC][Asteroid] Sanshas Nation',
|
||||
'[NPC][Asteroid] Serpentis',
|
||||
'[NPC][Burner] Ashimmu (Blood Raiders)',
|
||||
'[NPC][Burner] Cruor (Blood Raiders)',
|
||||
'[NPC][Burner] Daredevil (Serpentis)',
|
||||
'[NPC][Burner] Dramiel (Angel)',
|
||||
'[NPC][Burner] Enyo',
|
||||
'[NPC][Burner] Hawk',
|
||||
'[NPC][Burner] Jaguar',
|
||||
'[NPC][Burner] Sentinel',
|
||||
'[NPC][Burner] Succubus (Sanshas Nation)',
|
||||
'[NPC][Burner] Talos',
|
||||
'[NPC][Burner] Vengeance',
|
||||
'[NPC][Burner] Worm (Guristas)',
|
||||
'[NPC][Deadspace] Angel Cartel',
|
||||
'[NPC][Deadspace] Blood Raiders',
|
||||
'[NPC][Deadspace] Guristas',
|
||||
'[NPC][Deadspace] Rogue Drone',
|
||||
'[NPC][Deadspace] Sanshas Nation',
|
||||
'[NPC][Deadspace] Serpentis',
|
||||
'[NPC][Mission] Amarr Empire',
|
||||
'[NPC][Mission] CONCORD',
|
||||
'[NPC][Mission] Caldari State',
|
||||
'[NPC][Mission] Gallente Federation',
|
||||
'[NPC][Mission] Khanid',
|
||||
'[NPC][Mission] Minmatar Republic',
|
||||
'[NPC][Mission] Mordus Legion',
|
||||
'[NPC][Mission] Thukker',
|
||||
'[NPC][Other] Sansha Incursion',
|
||||
'[NPC][Other] Sleepers',
|
||||
'[Projectile Ammo]Carbonized Lead',
|
||||
'[Projectile Ammo]Depleted Uranium',
|
||||
'[Projectile Ammo]EMP',
|
||||
'[Projectile Ammo]Fusion',
|
||||
'[Projectile Ammo]Nuclear',
|
||||
'[Projectile Ammo]Phased Plasma',
|
||||
'[Projectile Ammo]Proton',
|
||||
'[Projectile Ammo]Titanium Sabot',
|
||||
'[Projectile Ammo][T2] Barrage',
|
||||
'[Projectile Ammo][T2] Hail',
|
||||
'[Projectile Ammo][T2] Quake',
|
||||
'[Projectile Ammo][T2] Tremor')
|
||||
|
||||
tgtProfiles = (
|
||||
'Uniform (25%)',
|
||||
'Uniform (50%)',
|
||||
'Uniform (75%)',
|
||||
'Uniform (90%)',
|
||||
'[NPC][Asteroid] Angel Cartel',
|
||||
'[NPC][Asteroid] Blood Raiders',
|
||||
'[NPC][Asteroid] Guristas',
|
||||
'[NPC][Asteroid] Rogue Drones',
|
||||
'[NPC][Asteroid] Sanshas Nation',
|
||||
'[NPC][Asteroid] Serpentis',
|
||||
'[NPC][Burner] Ashimmu (Blood Raiders)',
|
||||
'[NPC][Burner] Cruor (Blood Raiders)',
|
||||
'[NPC][Burner] Daredevil (Serpentis)',
|
||||
'[NPC][Burner] Dramiel (Angel)',
|
||||
'[NPC][Burner] Enyo',
|
||||
'[NPC][Burner] Hawk',
|
||||
'[NPC][Burner] Jaguar',
|
||||
'[NPC][Burner] Sentinel',
|
||||
'[NPC][Burner] Succubus (Sanshas Nation)',
|
||||
'[NPC][Burner] Talos',
|
||||
'[NPC][Burner] Vengeance',
|
||||
'[NPC][Burner] Worm (Guristas)',
|
||||
'[NPC][Deadspace] Angel Cartel',
|
||||
'[NPC][Deadspace] Blood Raiders',
|
||||
'[NPC][Deadspace] Guristas',
|
||||
'[NPC][Deadspace] Rogue Drones',
|
||||
'[NPC][Deadspace] Sanshas Nation',
|
||||
'[NPC][Deadspace] Serpentis',
|
||||
'[NPC][Mission] Amarr Empire',
|
||||
'[NPC][Mission] CONCORD',
|
||||
'[NPC][Mission] Caldari State',
|
||||
'[NPC][Mission] Gallente Federation',
|
||||
'[NPC][Mission] Khanid',
|
||||
'[NPC][Mission] Minmatar Republic',
|
||||
'[NPC][Mission] Mordus Legion',
|
||||
'[NPC][Other] Sansha Incursion',
|
||||
'[NPC][Other] Sleeper',
|
||||
'[T1 Resist]Armor',
|
||||
'[T1 Resist]Armor (+T2 DCU)',
|
||||
'[T1 Resist]Hull',
|
||||
'[T1 Resist]Hull (+T2 DCU)',
|
||||
'[T1 Resist]Shield',
|
||||
'[T1 Resist]Shield (+T2 DCU)',
|
||||
'[T2 Resist]Amarr (Armor)',
|
||||
'[T2 Resist]Amarr (Shield)',
|
||||
'[T2 Resist]Caldari (Armor)',
|
||||
'[T2 Resist]Caldari (Shield)',
|
||||
'[T2 Resist]Gallente (Armor)',
|
||||
'[T2 Resist]Gallente (Shield)',
|
||||
'[T2 Resist]Minmatar (Armor)',
|
||||
'[T2 Resist]Minmatar (Shield)')
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute('DELETE FROM damagePatterns WHERE name in ({});'.format(', '.join('\'{}\''.format(n) for n in dmgPatterns)))
|
||||
saveddata_engine.execute('DELETE FROM targetResists WHERE name in ({});'.format(', '.join('\'{}\''.format(n) for n in tgtProfiles)))
|
||||
try:
|
||||
saveddata_engine.execute("SELECT builtinDamagePatternID FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN builtinDamagePatternID INT;")
|
||||
try:
|
||||
saveddata_engine.execute("SELECT builtinTargetResistsID FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN builtinTargetResistsID INT;")
|
||||
@@ -1,84 +0,0 @@
|
||||
"""
|
||||
Migration 36
|
||||
|
||||
- Shield Booster, Armor Repairer and Capacitor Transfer tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
6441: ( # Small Clarity Ward Enduring Shield Booster
|
||||
6443, # Small Converse Deflection Catalyzer
|
||||
),
|
||||
6437: ( # Small C5-L Compact Shield Booster
|
||||
6439, # Small Neutron Saturation Injector I
|
||||
),
|
||||
10868: ( # Medium Clarity Ward Enduring Shield Booster
|
||||
10870, # Medium Converse Deflection Catalyzer
|
||||
),
|
||||
10872: ( # Medium C5-L Compact Shield Booster
|
||||
10866, # Medium Neutron Saturation Injector I
|
||||
),
|
||||
10876: ( # Large Clarity Ward Enduring Shield Booster
|
||||
10878, # Large Converse Deflection Catalyzer
|
||||
),
|
||||
10880: ( # Large C5-L Compact Shield Booster
|
||||
10874, # Large Neutron Saturation Injector I
|
||||
),
|
||||
10884: ( # X-Large Clarity Ward Enduring Shield Booster
|
||||
10886, # X-Large Converse Deflection Catalyzer
|
||||
),
|
||||
10888: ( # X-Large C5-L Compact Shield Booster
|
||||
10882, # X-Large Neutron Saturation Injector I
|
||||
),
|
||||
4533: ( # Small ACM Compact Armor Repairer
|
||||
4531, # Small Inefficient Armor Repair Unit
|
||||
),
|
||||
4529: ( # Small I-a Enduring Armor Repairer
|
||||
4535, # Small Automated Carapace Restoration
|
||||
),
|
||||
4573: ( # Medium ACM Compact Armor Repairer
|
||||
4571, # Medium Inefficient Armor Repair Unit
|
||||
),
|
||||
4569: ( # Medium I-a Enduring Armor Repairer
|
||||
4575, # Medium Automated Carapace Restoration
|
||||
),
|
||||
22889: ( # 'Meditation' Medium Armor Repairer I
|
||||
4579, # Medium Nano Armor Repair Unit I
|
||||
),
|
||||
4613: ( # Large ACM Compact Armor Repairer
|
||||
4611, # Large Inefficient Armor Repair Unit
|
||||
),
|
||||
4609: ( # Large I-a Enduring Armor Repairer
|
||||
4615, # Large Automated Carapace Restoration
|
||||
),
|
||||
22891: ( # 'Protest' Large Armor Repairer I
|
||||
4621, # Large 'Reprieve' Vestment Reconstructer I
|
||||
),
|
||||
5093: ( # Small Radiative Scoped Remote Capacitor Transmitter
|
||||
5087, # Small Partial E95a Remote Capacitor Transmitter
|
||||
),
|
||||
5091: ( # Small Inductive Compact Remote Capacitor Transmitter
|
||||
5089, # Small Murky Remote Capacitor Transmitter
|
||||
),
|
||||
16489: ( # Medium Radiative Scoped Remote Capacitor Transmitter
|
||||
16493, # Medium Partial E95b Remote Capacitor Transmitter
|
||||
),
|
||||
16495: ( # Medium Inductive Compact Remote Capacitor Transmitter
|
||||
16491, # Medium Murky Remote Capacitor Transmitter
|
||||
),
|
||||
16481: ( # Large Radiative Scoped Remote Capacitor Transmitter
|
||||
16485, # Large Partial E95c Remote Capacitor Transmitter
|
||||
),
|
||||
16487: ( # Large Inductive Compact Remote Capacitor Transmitter
|
||||
16483, # Large Murky Remote Capacitor Transmitter
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,44 +0,0 @@
|
||||
"""
|
||||
Migration 37
|
||||
|
||||
- Capacitor Booster tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
4959: ( # 'Seed' Micro Capacitor Booster I
|
||||
4957, # Micro Brief Capacitor Overcharge I
|
||||
4961, # Micro Tapered Capacitor Infusion I
|
||||
4955, # Micro F-RX Prototype Capacitor Boost
|
||||
3556, # Micro Capacitor Booster I
|
||||
3558, # Micro Capacitor Booster II
|
||||
15774, # Ammatar Navy Micro Capacitor Booster
|
||||
14180, # Dark Blood Micro Capacitor Booster
|
||||
14182, # True Sansha Micro Capacitor Booster
|
||||
15782, # Imperial Navy Micro Capacitor Booster
|
||||
),
|
||||
5011: ( # Small F-RX Compact Capacitor Booster
|
||||
5009, # Small Brief Capacitor Overcharge I
|
||||
5013, # Small Tapered Capacitor Infusion I
|
||||
5007, # Small F-RX Prototype Capacitor Boost
|
||||
),
|
||||
4833: ( # Medium F-RX Compact Capacitor Booster
|
||||
4831, # Medium Brief Capacitor Overcharge I
|
||||
4835, # Medium Tapered Capacitor Infusion I
|
||||
4829, # Medium F-RX Prototype Capacitor Boost
|
||||
),
|
||||
5051: ( # Heavy F-RX Compact Capacitor Booster
|
||||
5049, # Heavy Brief Capacitor Overcharge I
|
||||
5053, # Heavy Tapered Capacitor Infusion I
|
||||
5047, # Heavy F-RX Prototype Capacitor Boost
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,42 +0,0 @@
|
||||
"""
|
||||
Migration 38
|
||||
|
||||
- Armor hardener tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16357: ( # Experimental Enduring EM Armor Hardener I
|
||||
16353, # Upgraded Armor EM Hardener I
|
||||
),
|
||||
16365: ( # Experimental Enduring Explosive Armor Hardener I
|
||||
16361, # Upgraded Armor Explosive Hardener I
|
||||
),
|
||||
16373: ( # Experimental Enduring Kinetic Armor Hardener I
|
||||
16369, # Upgraded Armor Kinetic Hardener I
|
||||
),
|
||||
16381: ( # Experimental Enduring Thermal Armor Hardener I
|
||||
16377, # Upgraded Armor Thermal Hardener I
|
||||
),
|
||||
16359: ( # Prototype Compact EM Armor Hardener I
|
||||
16355, # Limited Armor EM Hardener I
|
||||
),
|
||||
16367: ( # Prototype Compact Explosive Armor Hardener I
|
||||
16363, # Limited Armor Explosive Hardener I
|
||||
),
|
||||
16375: ( # Prototype Compact Kinetic Armor Hardener I
|
||||
16371, # Limited Armor Kinetic Hardener I
|
||||
),
|
||||
16383: ( # Prototype Compact Thermal Armor Hardener I
|
||||
16379, # Limited Armor Thermal Hardener I
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,34 +0,0 @@
|
||||
"""
|
||||
Migration 39
|
||||
|
||||
- Shield amplifier tiericide
|
||||
- CCP getting rid of DB TDs due to exploits
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
1798: ( # 'Basic' EM Shield Amplifier
|
||||
9562, # Supplemental EM Ward Amplifier
|
||||
),
|
||||
1804: ( # 'Basic' Explosive Shield Amplifier
|
||||
9574, # Supplemental Explosive Deflection Amplifier
|
||||
),
|
||||
1802: ( # 'Basic' Kinetic Shield Amplifier
|
||||
9570, # Supplemental Kinetic Deflection Amplifier
|
||||
),
|
||||
1800: ( # 'Basic' Thermal Shield Amplifier
|
||||
9566, # Supplemental Thermal Dissipation Amplifier
|
||||
),
|
||||
22933: ( # 'Investor' Tracking Disruptor I
|
||||
32416, # Dark Blood Tracking Disruptor
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -133,7 +133,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

@@ -1,18 +0,0 @@
"""
Migration 40

Imports all item conversions since Migration 28 and runs them against module.baseItemID. This column seems to have been
forgotten about since it's been added.

"""
from .upgrade36 import CONVERSIONS as u36
from .upgrade37 import CONVERSIONS as u37
from .upgrade38 import CONVERSIONS as u38
from .upgrade39 import CONVERSIONS as u39

def upgrade(saveddata_engine):
    for conversions in [u36, u37, u38, u39]:
        for replacement_item, list in conversions.items():
            for retired_item in list:
                saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
                                         (replacement_item, retired_item))
@@ -1,50 +0,0 @@
|
||||
"""
|
||||
Migration 41
|
||||
|
||||
- Resistance plating tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16345: ( # Upgraded Layered Coating I
|
||||
16347, # Limited Layered Plating I
|
||||
16349, # 'Scarab' Layered Plating I
|
||||
16351, # 'Grail' Layered Plating I
|
||||
),
|
||||
16305: ( # Upgraded Multispectrum Coating I
|
||||
16307, # Limited Adaptive Nano Plating I
|
||||
16309, # 'Collateral' Adaptive Nano Plating I
|
||||
16311, # 'Refuge' Adaptive Nano Plating I
|
||||
),
|
||||
16329: ( # Upgraded EM Coating I
|
||||
16331, # Limited EM Plating I
|
||||
16333, # 'Contour' EM Plating I
|
||||
16335, # 'Spiegel' EM Plating I
|
||||
),
|
||||
16321: ( # Upgraded Explosive Coating I
|
||||
16323, # Limited Explosive Plating I
|
||||
16325, # Experimental Explosive Plating I
|
||||
16319, # 'Aegis' Explosive Plating I
|
||||
),
|
||||
16313: ( # Upgraded Kinetic Coating I
|
||||
16315, # Limited Kinetic Plating I
|
||||
16317, # Experimental Kinetic Plating I
|
||||
16327, # 'Element' Kinetic Plating I
|
||||
),
|
||||
16337: ( # Upgraded Thermal Coating I
|
||||
16339, # Limited Thermal Plating I
|
||||
16341, # Experimental Thermal Plating I
|
||||
16343, # Prototype Thermal Plating I
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,50 +0,0 @@
|
||||
"""
|
||||
Migration 42
|
||||
|
||||
- Resistance membrane tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16391: ( # Compact Multispectrum Energized Membrane
|
||||
16389, # Experimental Energized Adaptive Nano Membrane I
|
||||
16387, # Limited Energized Adaptive Nano Membrane I
|
||||
16385, # Upgraded Energized Adaptive Nano Membrane I
|
||||
),
|
||||
16423: ( # Compact Layered Energized Membrane
|
||||
16421, # Experimental Energized Armor Layering Membrane I
|
||||
16419, # Limited Energized Armor Layering Membrane I
|
||||
16417, # Upgraded Energized Armor Layering Membrane I
|
||||
),
|
||||
16415: ( # Compact EM Energized Membrane
|
||||
16413, # Experimental Energized EM Membrane I
|
||||
16411, # Limited Energized EM Membrane I
|
||||
16409, # Upgraded Energized EM Membrane I
|
||||
),
|
||||
16407: ( # Compact Explosive Energized Membrane
|
||||
16405, # Experimental Energized Explosive Membrane I
|
||||
16403, # Limited Energized Explosive Membrane I
|
||||
16401, # Upgraded Energized Explosive Membrane I
|
||||
),
|
||||
16399: ( # Compact Kinetic Energized Membrane
|
||||
16397, # Experimental Energized Kinetic Membrane I
|
||||
16395, # Limited Energized Kinetic Membrane I
|
||||
16393, # Upgraded Energized Kinetic Membrane I
|
||||
),
|
||||
16431: ( # Compact Thermal Energized Membrane
|
||||
16429, # Experimental Energized Thermal Membrane I
|
||||
16427, # Limited Energized Thermal Membrane I
|
||||
16425, # Upgraded Energized Thermal Membrane I
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,26 +0,0 @@
|
||||
"""
|
||||
Migration 43
|
||||
|
||||
- Shield booster amplifier tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16533: ( # Stalwart Restrained Shield Boost Amplifier
|
||||
16531, # 5a Prototype Shield Support I
|
||||
),
|
||||
16535: ( # Copasetic Compact Shield Boost Amplifier
|
||||
16529, # Ionic Field Accelerator I
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,25 +0,0 @@
|
||||
"""
|
||||
Migration 44
|
||||
|
||||
- Signal distortion amplifier tiericide
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
25565: ( # Hypnos Compact Signal Distortion Amplifier I
|
||||
25571, # Initiated Signal Distortion Amplifier I
|
||||
25569, # Induced Signal Distortion Amplifier I
|
||||
25567, # Compulsive Signal Distortion Amplifier I
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,18 +0,0 @@
"""
Migration 45

- Drone mutaplasmid support
"""

import sqlalchemy


def upgrade(saveddata_engine):
    try:
        saveddata_engine.execute("SELECT baseItemID FROM drones LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE drones ADD COLUMN baseItemID INTEGER;")
    try:
        saveddata_engine.execute("SELECT mutaplasmidID FROM drones LIMIT 1")
    except sqlalchemy.exc.DatabaseError:
        saveddata_engine.execute("ALTER TABLE drones ADD COLUMN mutaplasmidID INTEGER;")
@@ -1,68 +0,0 @@
|
||||
"""
|
||||
Migration 46
|
||||
|
||||
- Mining crystal changes
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
60276: ( # Simple Asteroid Mining Crystal Type A I
|
||||
18066, # Veldspar Mining Crystal I
|
||||
18062, # Scordite Mining Crystal I
|
||||
18060, # Pyroxeres Mining Crystal I
|
||||
18058, # Plagioclase Mining Crystal I
|
||||
),
|
||||
60281: ( # Simple Asteroid Mining Crystal Type A II
|
||||
18618, # Veldspar Mining Crystal II
|
||||
18616, # Scordite Mining Crystal II
|
||||
18614, # Pyroxeres Mining Crystal II
|
||||
18612, # Plagioclase Mining Crystal II
|
||||
),
|
||||
60285: ( # Coherent Asteroid Mining Crystal Type A I
|
||||
18056, # Omber Mining Crystal I
|
||||
18052, # Kernite Mining Crystal I
|
||||
18050, # Jaspet Mining Crystal I
|
||||
18048, # Hemorphite Mining Crystal I
|
||||
18046, # Hedbergite Mining Crystal I
|
||||
),
|
||||
60288: ( # Coherent Asteroid Mining Crystal Type A II
|
||||
18610, # Omber Mining Crystal II
|
||||
18604, # Jaspet Mining Crystal II
|
||||
18606, # Kernite Mining Crystal II
|
||||
18600, # Hedbergite Mining Crystal II
|
||||
18602, # Hemorphite Mining Crystal II
|
||||
),
|
||||
60291: ( # Variegated Asteroid Mining Crystal Type A I
|
||||
18044, # Gneiss Mining Crystal I
|
||||
18042, # Dark Ochre Mining Crystal I
|
||||
18040, # Crokite Mining Crystal I
|
||||
),
|
||||
60294: ( # Variegated Asteroid Mining Crystal Type A II
|
||||
18598, # Gneiss Mining Crystal II
|
||||
18596, # Dark Ochre Mining Crystal II
|
||||
18594, # Crokite Mining Crystal II
|
||||
),
|
||||
60297: ( # Complex Asteroid Mining Crystal Type A I
|
||||
18038, # Bistot Mining Crystal I
|
||||
18036, # Arkonor Mining Crystal I
|
||||
18064, # Spodumain Mining Crystal I
|
||||
),
|
||||
60300: ( # Complex Asteroid Mining Crystal Type A II
|
||||
18592, # Bistot Mining Crystal II
|
||||
18590, # Arkonor Mining Crystal II
|
||||
18624, # Spodumain Mining Crystal II
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "baseItemID" = ? WHERE "baseItemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "chargeID" = ? WHERE "chargeID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -17,7 +17,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert ships
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "fits" SET "shipID" = ? WHERE "shipID" = ?',
                                     (replacement_item, retired_item))

@@ -77,7 +77,7 @@ CONVERSIONS = {

def upgrade(saveddata_engine):
    # Convert modules
    for replacement_item, list in CONVERSIONS.items():
    for replacement_item, list in CONVERSIONS.iteritems():
        for retired_item in list:
            saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                     (replacement_item, retired_item))

@@ -1,8 +1,6 @@
__all__ = [
    "character",
    "fit",
    "mutatorMod",
    "mutatorDrone",
    "module",
    "user",
    "skill",
@@ -12,7 +10,9 @@ __all__ = [
    "implant",
    "damagePattern",
    "miscData",
    "targetProfile",
    "targetResists",
    "override",
    "implantSet"
    "crest",
    "implantSet",
    "loadDefaultDatabaseValues"
]

@@ -24,12 +24,11 @@ import datetime
from eos.db import saveddata_meta
from eos.saveddata.booster import Booster
from eos.saveddata.boosterSideEffect import BoosterSideEffect
from eos.saveddata.fit import Fit

boosters_table = Table("boosters", saveddata_meta,
                       Column("ID", Integer, primary_key=True),
                       Column("itemID", Integer),
                       Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
                       Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False),
                       Column("active", Boolean),
                       Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                       Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
@@ -45,7 +44,6 @@ booster_side_effect_table = Table("boosterSideEffects", saveddata_meta,

mapper(Booster, boosters_table,
       properties={
           "owner": relation(Fit),
           "_Booster__sideEffects": relation(
               BoosterSideEffect,
               backref="booster",

@@ -17,21 +17,24 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime, Float, UniqueConstraint
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime, Float
|
||||
from sqlalchemy.orm import relation, mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.db.saveddata.implant import charImplants_table
|
||||
from eos.effectHandlerHelpers import HandledImplantList, HandledSsoCharacterList
|
||||
from eos.effectHandlerHelpers import HandledImplantBoosterList
|
||||
from eos.saveddata.implant import Implant
|
||||
from eos.saveddata.user import User
|
||||
from eos.saveddata.character import Character, Skill
|
||||
from eos.saveddata.ssocharacter import SsoCharacter
|
||||
|
||||
characters_table = Table("characters", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("name", String, nullable=False),
|
||||
Column("apiID", Integer),
|
||||
Column("apiKey", String),
|
||||
Column("defaultChar", Integer),
|
||||
Column("chars", String, nullable=True),
|
||||
Column("defaultLevel", Integer, nullable=True),
|
||||
Column("alphaCloneID", Integer, nullable=True),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
@@ -39,28 +42,6 @@ characters_table = Table("characters", saveddata_meta,
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))
|
||||
|
||||
sso_table = Table("ssoCharacter", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("client", String, nullable=False),
|
||||
Column("characterID", Integer, nullable=False),
|
||||
Column("characterName", String, nullable=False),
|
||||
Column("refreshToken", String, nullable=False),
|
||||
Column("accessToken", String, nullable=False),
|
||||
Column("accessTokenExpires", DateTime, nullable=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
UniqueConstraint('client', 'characterID', name='uix_client_characterID'),
|
||||
UniqueConstraint('client', 'characterName', name='uix_client_characterName')
|
||||
)
|
||||
|
||||
sso_character_map_table = Table("ssoCharacterMap", saveddata_meta,
|
||||
Column("characterID", ForeignKey("characters.ID"), primary_key=True),
|
||||
Column("ssoCharacterID", ForeignKey("ssoCharacter.ID"), primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
mapper(SsoCharacter, sso_table)
|
||||
|
||||
mapper(Character, characters_table,
|
||||
properties={
|
||||
"_Character__alphaCloneID": characters_table.c.alphaCloneID,
|
||||
@@ -75,17 +56,12 @@ mapper(Character, characters_table,
|
||||
cascade="all,delete-orphan"),
|
||||
"_Character__implants" : relation(
|
||||
Implant,
|
||||
collection_class=HandledImplantList,
|
||||
collection_class=HandledImplantBoosterList,
|
||||
cascade='all,delete-orphan',
|
||||
backref='character',
|
||||
single_parent=True,
|
||||
primaryjoin=charImplants_table.c.charID == characters_table.c.ID,
|
||||
secondaryjoin=charImplants_table.c.implantID == Implant.ID,
|
||||
secondary=charImplants_table),
|
||||
"_Character__ssoCharacters" : relation(
|
||||
SsoCharacter,
|
||||
collection_class=HandledSsoCharacterList,
|
||||
backref='characters',
|
||||
secondary=sso_character_map_table)
|
||||
}
|
||||
)
|
||||
|
||||
34
eos/db/saveddata/crest.py
Normal file
34
eos/db/saveddata/crest.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, String, DateTime
|
||||
from sqlalchemy.orm import mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.crestchar import CrestChar
|
||||
|
||||
crest_table = Table("crest", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("name", String, nullable=False, unique=True),
|
||||
Column("refresh_token", String, nullable=False),
|
||||
# These records aren't updated. Instead, they are dropped and created, hence we don't have a modified field
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now))
|
||||
|
||||
mapper(CrestChar, crest_table)
|
||||
@@ -17,27 +17,23 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String, DateTime
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime
|
||||
from sqlalchemy.orm import mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.damagePattern import DamagePattern
|
||||
|
||||
damagePatterns_table = Table(
|
||||
'damagePatterns',
|
||||
saveddata_meta,
|
||||
Column('ID', Integer, primary_key=True),
|
||||
Column('name', String),
|
||||
Column('emAmount', Float),
|
||||
Column('thermalAmount', Float),
|
||||
Column('kineticAmount', Float),
|
||||
Column('explosiveAmount', Float),
|
||||
Column('ownerID', ForeignKey('users.ID'), nullable=True),
|
||||
Column('created', DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column('modified', DateTime, nullable=True, onupdate=datetime.datetime.now))
|
||||
damagePatterns_table = Table("damagePatterns", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("name", String),
|
||||
Column("emAmount", Integer),
|
||||
Column("thermalAmount", Integer),
|
||||
Column("kineticAmount", Integer),
|
||||
Column("explosiveAmount", Integer),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
mapper(
|
||||
DamagePattern,
|
||||
damagePatterns_table,
|
||||
properties={'rawName': damagePatterns_table.c.name})
|
||||
mapper(DamagePattern, damagePatterns_table)
|
||||
|
||||
@@ -23,7 +23,7 @@ from logbook import Logger
pyfalog = Logger(__name__)


class DatabaseCleanup:
class DatabaseCleanup(object):
    def __init__(self):
        pass

@@ -17,36 +17,27 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, Float, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation, synonym
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.drone import Drone
|
||||
from eos.saveddata.fit import Fit
|
||||
from eos.saveddata.mutator import MutatorDrone
|
||||
|
||||
drones_table = Table("drones", saveddata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("baseItemID", Integer, nullable=True),
|
||||
Column("mutaplasmidID", Integer, nullable=True),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("amountActive", Integer, nullable=False),
|
||||
Column("projected", Boolean, default=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
Column("projectionRange", Float, nullable=True))
|
||||
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
mapper(Drone, drones_table,
|
||||
properties={
|
||||
"ID": synonym("groupID"),
|
||||
"owner": relation(Fit),
|
||||
"mutators": relation(
|
||||
MutatorDrone,
|
||||
backref="item",
|
||||
cascade="all,delete-orphan",
|
||||
collection_class=attribute_mapped_collection('attrID'))})
|
||||
"owner": relation(Fit)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, Boolean, DateTime
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
from sqlalchemy.orm import mapper, relation
import datetime

@@ -34,8 +34,7 @@ fighters_table = Table("fighters", saveddata_meta,
                       Column("amount", Integer, nullable=False),
                       Column("projected", Boolean, default=False),
                       Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                       Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
                       Column("projectionRange", Float, nullable=True),
                       Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
                       )

fighter_abilities_table = Table("fightersAbilities", saveddata_meta,
@@ -47,7 +46,6 @@ fighter_abilities_table = Table("fightersAbilities", saveddata_meta,
mapper(Fighter, fighters_table,
       properties={
           "owner" : relation(Fit),
           "_amount" : fighters_table.c.amount,
           "_Fighter__abilities": relation(
               FighterAbility,
               backref="fighter",
@@ -17,33 +17,33 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import datetime

from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Float, String, Table
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import mapper, reconstructor, relation, relationship
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.sql import and_
from sqlalchemy.orm import relation, reconstructor, mapper, relationship
from sqlalchemy import ForeignKey, Column, Integer, String, Table, Boolean, DateTime
import datetime

from eos.db import saveddata_meta, saveddata_session
from eos.db import saveddata_meta
from eos.db import saveddata_session
from eos.db.saveddata.cargo import cargo_table
from eos.db.saveddata.drone import drones_table
from eos.db.saveddata.fighter import fighters_table
from eos.db.saveddata.implant import fitImplants_table
from eos.db.saveddata.module import modules_table
from eos.effectHandlerHelpers import HandledDroneCargoList, HandledImplantList, HandledBoosterList, HandledModuleList, HandledProjectedDroneList, HandledProjectedModList
from eos.saveddata.booster import Booster
from eos.saveddata.cargo import Cargo
from eos.saveddata.character import Character
from eos.saveddata.damagePattern import DamagePattern
from eos.saveddata.drone import Drone
from eos.saveddata.fighter import Fighter
from eos.saveddata.fit import Fit as es_Fit
from eos.effectHandlerHelpers import HandledModuleList, HandledImplantBoosterList, HandledProjectedModList, \
    HandledDroneCargoList, HandledProjectedDroneList
from eos.saveddata.implant import Implant
from eos.saveddata.module import Module
from eos.saveddata.targetProfile import TargetProfile
from eos.saveddata.character import Character
from eos.saveddata.user import User

from eos.saveddata.fighter import Fighter
from eos.saveddata.fit import Fit as es_Fit, ImplantLocation
from eos.saveddata.drone import Drone
from eos.saveddata.booster import Booster
from eos.saveddata.module import Module
from eos.saveddata.cargo import Cargo
from eos.saveddata.damagePattern import DamagePattern
from eos.saveddata.targetResists import TargetResists

fits_table = Table("fits", saveddata_meta,
                   Column("ID", Integer, primary_key=True),
@@ -53,17 +53,14 @@ fits_table = Table("fits", saveddata_meta,
                   Column("timestamp", Integer, nullable=False),
                   Column("characterID", ForeignKey("characters.ID"), nullable=True),
                   Column("damagePatternID", ForeignKey("damagePatterns.ID"), nullable=True),
                   Column("builtinDamagePatternID", Integer, nullable=True),
                   Column("booster", Boolean, nullable=False, index=True, default=0),
                   Column("targetResistsID", ForeignKey("targetResists.ID"), nullable=True),
                   Column("builtinTargetResistsID", Integer, nullable=True),
                   Column("modeID", Integer, nullable=True),
                   Column("implantLocation", Integer, nullable=False),
                   Column("implantLocation", Integer, nullable=False, default=ImplantLocation.FIT),
                   Column("notes", String, nullable=True),
                   Column("ignoreRestrictions", Boolean, default=0),
                   Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                   Column("modified", DateTime, nullable=True, default=datetime.datetime.now, onupdate=datetime.datetime.now),
                   Column("systemSecurity", Integer, nullable=True)
                   Column("modified", DateTime, nullable=True, default=datetime.datetime.now, onupdate=datetime.datetime.now)
                   )

projectedFits_table = Table("projectedFits", saveddata_meta,
@@ -72,8 +69,7 @@ projectedFits_table = Table("projectedFits", saveddata_meta,
                            Column("amount", Integer, nullable=False, default=1),
                            Column("active", Boolean, nullable=False, default=1),
                            Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                            Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
                            Column("projectionRange", Float, nullable=True),
                            Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
                            )

commandFits_table = Table("commandFits", saveddata_meta,
@@ -85,8 +81,7 @@ commandFits_table = Table("commandFits", saveddata_meta,
                          )


class ProjectedFit:

class ProjectedFit(object):
    def __init__(self, sourceID, source_fit, amount=1, active=True):
        self.sourceID = sourceID
        self.source_fit = source_fit
@@ -117,7 +112,7 @@ class ProjectedFit:
        )


class CommandFit:
class CommandFit(object):
    def __init__(self, boosterID, booster_fit, active=True):
        self.boosterID = boosterID
        self.booster_fit = booster_fit
@@ -137,13 +132,13 @@ class CommandFit:
        )


es_Fit.projectedFitDict = association_proxy(
es_Fit._Fit__projectedFits = association_proxy(
    "victimOf",  # look at the victimOf association...
    "source_fit",  # .. and return the source fits
    creator=lambda sourceID, source_fit: ProjectedFit(sourceID, source_fit)
)

es_Fit.commandFitDict = association_proxy(
es_Fit._Fit__commandFits = association_proxy(
    "boostedOf",  # look at the boostedOf association...
    "booster_fit",  # .. and return the booster fit
    creator=lambda boosterID, booster_fit: CommandFit(boosterID, booster_fit)
@@ -175,13 +170,12 @@ mapper(es_Fit, fits_table,
           collection_class=HandledModuleList,
           primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == False),  # noqa
           order_by=modules_table.c.position,
           overlaps='owner',
           cascade='all, delete, delete-orphan'),
       "_Fit__projectedModules": relation(
           Module,
           collection_class=HandledProjectedModList,
           overlaps='owner, _Fit__modules',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == True)),  # noqa
       "owner": relation(
           User,
@@ -190,42 +184,43 @@ mapper(es_Fit, fits_table,
       "shipID": fits_table.c.shipID,
       "_Fit__boosters": relation(
           Booster,
           collection_class=HandledBoosterList,
           overlaps='owner',
           cascade='all, delete, delete-orphan'),
           collection_class=HandledImplantBoosterList,
           cascade='all, delete, delete-orphan',
           backref='owner',
           single_parent=True),
       "_Fit__drones": relation(
           Drone,
           collection_class=HandledDroneCargoList,
           overlaps='owner',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == False)),  # noqa
       "_Fit__fighters": relation(
           Fighter,
           collection_class=HandledDroneCargoList,
           overlaps='owner',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == False)),  # noqa
       "_Fit__cargo": relation(
           Cargo,
           collection_class=HandledDroneCargoList,
           overlaps='owner',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(cargo_table.c.fitID == fits_table.c.ID)),
       "_Fit__projectedDrones": relation(
           Drone,
           collection_class=HandledProjectedDroneList,
           overlaps='owner, _Fit__drones',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == True)),  # noqa
       "_Fit__projectedFighters": relation(
           Fighter,
           collection_class=HandledProjectedDroneList,
           overlaps='owner, _Fit__fighters',
           cascade='all, delete, delete-orphan',
           single_parent=True,
           primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == True)),  # noqa
       "_Fit__implants": relation(
           Implant,
           collection_class=HandledImplantList,
           collection_class=HandledImplantBoosterList,
           cascade='all, delete, delete-orphan',
           backref='owner',
           single_parent=True,
@@ -235,10 +230,8 @@ mapper(es_Fit, fits_table,
       "_Fit__character": relation(
           Character,
           backref="fits"),
       "_Fit__userDamagePattern": relation(DamagePattern),
       "_Fit__builtinDamagePatternID": fits_table.c.builtinDamagePatternID,
       "_Fit__userTargetProfile": relation(TargetProfile),
       "_Fit__builtinTargetProfileID": fits_table.c.builtinTargetResistsID,
       "_Fit__damagePattern": relation(DamagePattern),
       "_Fit__targetResists": relation(TargetResists),
       "projectedOnto": projectedFitSourceRel,
       "victimOf": relationship(
           ProjectedFit,
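The association_proxy assignments above are how a Fit exposes the fits projected onto it (and its command boosters) without handling ProjectedFit/CommandFit association rows directly: the proxy walks the victimOf/boostedOf relationship and surfaces only the related fit, creating the association object on assignment. Below is a self-contained sketch of the same pattern with hypothetical names and a declarative model for brevity; pyfa's real schema, class names, and classical mappers differ, and SQLAlchemy 1.x is assumed.

# Illustrative sketch (not part of the diff): association_proxy over an
# association object, with hypothetical Fit/ProjectedFit models.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.orm.collections import attribute_mapped_collection

Base = declarative_base()


class Fit(Base):
    __tablename__ = "fits"
    ID = Column(Integer, primary_key=True)
    name = Column(String)

    # Association rows describing which fits are projected onto this one,
    # keyed by the projecting fit's ID.
    victimOf = relationship(
        "ProjectedFit",
        foreign_keys="[ProjectedFit.victimID]",
        collection_class=attribute_mapped_collection("sourceID"),
        cascade="all, delete-orphan")

    # The proxy hides the association object: fit.projectedFits[src.ID] = src
    projectedFits = association_proxy(
        "victimOf", "source_fit",
        creator=lambda sourceID, source_fit: ProjectedFit(sourceID, source_fit))


class ProjectedFit(Base):
    __tablename__ = "projectedFits"
    sourceID = Column(Integer, ForeignKey("fits.ID"), primary_key=True)
    victimID = Column(Integer, ForeignKey("fits.ID"), primary_key=True)

    source_fit = relationship("Fit", foreign_keys="[ProjectedFit.sourceID]")

    def __init__(self, sourceID, source_fit):
        self.sourceID = sourceID
        self.source_fit = source_fit


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

victim, source = Fit(name="victim"), Fit(name="source")
session.add_all([victim, source])
session.flush()  # assign primary keys so source.ID can be used as a dict key

victim.projectedFits[source.ID] = source  # creates a ProjectedFit row behind the scenes
session.commit()
print(list(victim.projectedFits.values())[0].name)  # -> "source"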
@@ -23,7 +23,7 @@ import datetime

from eos.db import saveddata_meta
from eos.db.saveddata.implant import implantsSetMap_table
from eos.effectHandlerHelpers import HandledImplantList
from eos.effectHandlerHelpers import HandledImplantBoosterList
from eos.saveddata.implant import Implant
from eos.saveddata.implantSet import ImplantSet

@@ -38,7 +38,7 @@ mapper(ImplantSet, implant_set_table,
       properties={
           "_ImplantSet__implants": relation(
               Implant,
               collection_class=HandledImplantList,
               collection_class=HandledImplantBoosterList,
               cascade='all, delete, delete-orphan',
               backref='set',
               single_parent=True,
eos/db/saveddata/loadDefaultDatabaseValues.py (new file, 198 lines)
@@ -0,0 +1,198 @@
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import eos.db
from eos.saveddata.damagePattern import DamagePattern as es_DamagePattern
from eos.saveddata.targetResists import TargetResists as es_TargetResists


class ImportError(Exception):
    pass


class DefaultDatabaseValues(object):
    def __init__(self):
        pass

    instance = None

    @classmethod
    def importDamageProfileDefaults(cls):
        damageProfileList = [
            ["Uniform", "25", "25", "25", "25"],
            ["[Generic]EM", "100", "0", "0", "0"],
            ["[Generic]Thermal", "0", "100", "0", "0"],
            ["[Generic]Kinetic", "0", "0", "100", "0"],
            ["[Generic]Explosive", "0", "0", "0", "100"],
            ["[NPC][Asteroid] Blood Raiders", "5067", "4214", "0", "0"],
            ["[Bombs]Concussion Bomb", "0", "0", "6400", "0"],
            ["[Bombs]Electron Bomb", "6400", "0", "0", "0"],
            ["[Bombs]Scorch Bomb", "0", "6400", "0", "0"],
            ["[Bombs]Shrapnel Bomb", "0", "0", "0", "6400"],
            ["[Frequency Crystals][T2] Gleam", "56", "56", "0", "0"],
            ["[Frequency Crystals][T2] Aurora", "40", "24", "0", "0"],
            ["[Frequency Crystals][T2] Scorch", "72", "16", "0", "0"],
            ["[Frequency Crystals][T2] Conflagration", "61.6", "61.6", "0", "0"],
            ["[Frequency Crystals]Gamma", "61.6", "35.2", "0", "0"],
            ["[Frequency Crystals]Infrared", "44", "17.6", "0", "0"],
            ["[Frequency Crystals]Microwave", "35.2", "17.6", "0", "0"],
            ["[Frequency Crystals]Multifrequency", "61.6", "44", "0", "0"],
            ["[Frequency Crystals]Radio", "44", "0", "0", "0"],
            ["[Frequency Crystals]Standard", "44", "26.4", "0", "0"],
            ["[Frequency Crystals]Ultraviolet", "52.8", "26.4", "0", "0"],
            ["[Frequency Crystals]Xray", "52.8", "35.2", "0", "0"],
            ["[Hybrid Charges][T2] Void", "0", "61.6", "61.6", "0"],
            ["[Hybrid Charges][T2] Null", "0", "48", "40", "0"],
            ["[Hybrid Charges][T2] Javelin", "0", "64", "48", "0"],
            ["[Hybrid Charges][T2] Spike", "0", "32", "32", "0"],
            ["[Hybrid Charges]Antimatter", "0", "48", "67.2", "0"],
            ["[Hybrid Charges]Iridium", "0", "28.8", "38.4", "0"],
            ["[Hybrid Charges]Iron", "0", "19.2", "28.8", "0"],
            ["[Hybrid Charges]Lead", "0", "28.8", "48", "0"],
            ["[Hybrid Charges]Plutonium", "0", "48", "57.6", "0"],
            ["[Hybrid Charges]Thorium", "0", "38.4", "48", "0"],
            ["[Hybrid Charges]Tungsten", "0", "19.2", "38.4", "0"],
            ["[Hybrid Charges]Uranium", "0", "38.4", "57.6", "0"],
            ["[Missiles]Mjolnir", "100", "0", "0", "0"],
            ["[Missiles]Inferno", "0", "100", "0", "0"],
            ["[Missiles]Scourge", "0", "0", "100", "0"],
            ["[Missiles]Nova", "0", "0", "0", "100"],
            ["[Missiles][Structure] Standup Missile", "100", "100", "100", "100"],
            ["[Projectile Ammo][T2] Tremor", "0", "0", "24", "40"],
            ["[Projectile Ammo][T2] Quake", "0", "0", "40", "72"],
            ["[Projectile Ammo][T2] Hail", "0", "0", "26.4", "96.8"],
            ["[Projectile Ammo][T2] Barrage", "0", "0", "40", "48"],
            ["[Projectile Ammo]Carbonized Lead", "0", "0", "35.2", "8.8"],
            ["[Projectile Ammo]Depleted Uranium", "0", "26.4", "17.6", "26.4"],
            ["[Projectile Ammo]EMP", "79.2", "0", "8.8", "17.6"],
            ["[Projectile Ammo]Fusion", "0", "0", "17.6", "88"],
            ["[Projectile Ammo]Nuclear", "0", "0", "8.8", "35.2"],
            ["[Projectile Ammo]Phased Plasma", "0", "88", "17.6", "0"],
            ["[Projectile Ammo]Proton", "26.4", "0", "17.6", "0"],
            ["[Projectile Ammo]Titanium Sabot", "0", "0", "52.8", "176"],
            ["[NPC][Burner] Cruor (Blood Raiders)", "90", "90", "0", "0"],
            ["[NPC][Burner] Dramiel (Angel)", "55", "0", "20", "96"],
            ["[NPC][Burner] Daredevil (Serpentis)", "0", "110", "154", "0"],
            ["[NPC][Burner] Succubus (Sanshas Nation)", "135", "30", "0", "0"],
            ["[NPC][Burner] Worm (Guristas)", "0", "0", "228", "0"],
            ["[NPC][Burner] Enyo", "0", "147", "147", "0"],
            ["[NPC][Burner] Hawk", "0", "0", "247", "0"],
            ["[NPC][Burner] Jaguar", "36", "0", "50", "182"],
            ["[NPC][Burner] Vengeance", "232", "0", "0", "0"],
            ["[NPC][Burner] Ashimmu (Blood Raiders)", "260", "100", "0", "0"],
            ["[NPC][Burner] Talos", "0", "413", "413", "0"],
            ["[NPC][Burner] Sentinel", "0", "75", "0", "90"],
            ["[NPC][Asteroid] Angel Cartel", "1838", "562", "2215", "3838"],
            ["[NPC][Deadspace] Angel Cartel", "369", "533", "1395", "3302"],
            ["[NPC][Deadspace] Blood Raiders", "6040", "5052", "10", "15"],
            ["[NPC][Asteroid] Guristas", "0", "1828", "7413", "0"],
            ["[NPC][Deadspace] Guristas", "0", "1531", "9680", "0"],
            ["[NPC][Asteroid] Rogue Drone", "394", "666", "1090", "1687"],
            ["[NPC][Deadspace] Rogue Drone", "276", "1071", "1069", "871"],
            ["[NPC][Asteroid] Sanshas Nation", "5586", "4112", "0", "0"],
            ["[NPC][Deadspace] Sanshas Nation", "3009", "2237", "0", "0"],
            ["[NPC][Asteroid] Serpentis", "0", "5373", "4813", "0"],
            ["[NPC][Deadspace] Serpentis", "0", "3110", "1929", "0"],
            ["[NPC][Mission] Amarr Empire", "4464", "3546", "97", "0"],
            ["[NPC][Mission] Caldari State", "0", "2139", "4867", "0"],
            ["[NPC][Mission] CONCORD", "336", "134", "212", "412"],
            ["[NPC][Mission] Gallente Federation", "9", "3712", "2758", "0"],
            ["[NPC][Mission] Khanid", "612", "483", "43", "6"],
            ["[NPC][Mission] Minmatar Republic", "1024", "388", "1655", "4285"],
            ["[NPC][Mission] Mordus Legion", "25", "262", "625", "0"],
            ["[NPC][Mission] Thukker", "0", "52", "10", "79"],
            ["[NPC][Other] Sleepers", "1472", "1472", "1384", "1384"],
            ["[NPC][Other] Sansha Incursion", "1682", "1347", "3678", "3678"]]

        for damageProfileRow in damageProfileList:
            name, em, therm, kin, exp = damageProfileRow
            damageProfile = eos.db.getDamagePattern(name)
            if damageProfile is None:
                damageProfile = es_DamagePattern(em, therm, kin, exp)
                damageProfile.name = name
                eos.db.save(damageProfile)

    @classmethod
    def importResistProfileDefaults(cls):
        targetResistProfileList = [
            ["Uniform (25%)", "0.25", "0.25", "0.25", "0.25"],
            ["Uniform (50%)", "0.50", "0.50", "0.50", "0.50"],
            ["Uniform (75%)", "0.75", "0.75", "0.75", "0.75"],
            ["Uniform (90%)", "0.90", "0.90", "0.90", "0.90"],
            ["[T1 Resist]Shield", "0.0", "0.20", "0.40", "0.50"],
            ["[T1 Resist]Armor", "0.50", "0.45", "0.25", "0.10"],
            ["[T1 Resist]Hull", "0.33", "0.33", "0.33", "0.33"],
            ["[T1 Resist]Shield (+T2 DCU)", "0.125", "0.30", "0.475", "0.562"],
            ["[T1 Resist]Armor (+T2 DCU)", "0.575", "0.532", "0.363", "0.235"],
            ["[T1 Resist]Hull (+T2 DCU)", "0.598", "0.598", "0.598", "0.598"],
            ["[T2 Resist]Amarr (Shield)", "0.0", "0.20", "0.70", "0.875"],
            ["[T2 Resist]Amarr (Armor)", "0.50", "0.35", "0.625", "0.80"],
            ["[T2 Resist]Caldari (Shield)", "0.20", "0.84", "0.76", "0.60"],
            ["[T2 Resist]Caldari (Armor)", "0.50", "0.8625", "0.625", "0.10"],
            ["[T2 Resist]Gallente (Shield)", "0.0", "0.60", "0.85", "0.50"],
            ["[T2 Resist]Gallente (Armor)", "0.50", "0.675", "0.8375", "0.10"],
            ["[T2 Resist]Minmatar (Shield)", "0.75", "0.60", "0.40", "0.50"],
            ["[T2 Resist]Minmatar (Armor)", "0.90", "0.675", "0.25", "0.10"],
            ["[NPC][Asteroid] Angel Cartel", "0.54", "0.42", "0.37", "0.32"],
            ["[NPC][Asteroid] Blood Raiders", "0.34", "0.39", "0.45", "0.52"],
            ["[NPC][Asteroid] Guristas", "0.55", "0.35", "0.3", "0.48"],
            ["[NPC][Asteroid] Rogue Drones", "0.35", "0.38", "0.44", "0.49"],
            ["[NPC][Asteroid] Sanshas Nation", "0.35", "0.4", "0.47", "0.53"],
            ["[NPC][Asteroid] Serpentis", "0.49", "0.38", "0.29", "0.51"],
            ["[NPC][Deadspace] Angel Cartel", "0.59", "0.48", "0.4", "0.32"],
            ["[NPC][Deadspace] Blood Raiders", "0.31", "0.39", "0.47", "0.56"],
            ["[NPC][Deadspace] Guristas", "0.57", "0.39", "0.31", "0.5"],
            ["[NPC][Deadspace] Rogue Drones", "0.42", "0.42", "0.47", "0.49"],
            ["[NPC][Deadspace] Sanshas Nation", "0.31", "0.39", "0.47", "0.56"],
            ["[NPC][Deadspace] Serpentis", "0.49", "0.38", "0.29", "0.56"],
            ["[NPC][Mission] Amarr Empire", "0.34", "0.38", "0.42", "0.46"],
            ["[NPC][Mission] Caldari State", "0.51", "0.38", "0.3", "0.51"],
            ["[NPC][Mission] CONCORD", "0.47", "0.46", "0.47", "0.47"],
            ["[NPC][Mission] Gallente Federation", "0.51", "0.38", "0.31", "0.52"],
            ["[NPC][Mission] Khanid", "0.51", "0.42", "0.36", "0.4"],
            ["[NPC][Mission] Minmatar Republic", "0.51", "0.46", "0.41", "0.35"],
            ["[NPC][Mission] Mordus Legion", "0.32", "0.48", "0.4", "0.62"],
            ["[NPC][Other] Sleeper", "0.61", "0.61", "0.61", "0.61"],
            ["[NPC][Other] Sansha Incursion", "0.65", "0.63", "0.64", "0.65"],
            ["[NPC][Burner] Cruor (Blood Raiders)", "0.8", "0.73", "0.69", "0.67"],
            ["[NPC][Burner] Dramiel (Angel)", "0.35", "0.48", "0.61", "0.68"],
            ["[NPC][Burner] Daredevil (Serpentis)", "0.69", "0.59", "0.59", "0.43"],
            ["[NPC][Burner] Succubus (Sanshas Nation)", "0.35", "0.48", "0.61", "0.68"],
            ["[NPC][Burner] Worm (Guristas)", "0.48", "0.58", "0.69", "0.74"],
            ["[NPC][Burner] Enyo", "0.58", "0.72", "0.86", "0.24"],
            ["[NPC][Burner] Hawk", "0.3", "0.86", "0.79", "0.65"],
            ["[NPC][Burner] Jaguar", "0.78", "0.65", "0.48", "0.56"],
            ["[NPC][Burner] Vengeance", "0.66", "0.56", "0.75", "0.86"],
            ["[NPC][Burner] Ashimmu (Blood Raiders)", "0.8", "0.76", "0.68", "0.7"],
            ["[NPC][Burner] Talos", "0.68", "0.59", "0.59", "0.43"],
            ["[NPC][Burner] Sentinel", "0.58", "0.45", "0.52", "0.66"]]

        for targetResistProfileRow in targetResistProfileList:
            name, em, therm, kin, exp = targetResistProfileRow
            resistsProfile = eos.db.getTargetResists(name)
            if resistsProfile is None:
                resistsProfile = es_TargetResists(em, therm, kin, exp)
                resistsProfile.name = name
                eos.db.save(resistsProfile)

    @classmethod
    def importRequiredDefaults(cls):
        damageProfileList = [["Uniform", "25", "25", "25", "25"]]

        for damageProfileRow in damageProfileList:
            name, em, therm, kin, exp = damageProfileRow
            damageProfile = eos.db.getDamagePattern(name)
            if damageProfile is None:
                damageProfile = es_DamagePattern(em, therm, kin, exp)
                damageProfile.name = name
                eos.db.save(damageProfile)
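The importer above is a plain get-or-create loop over hard-coded rows: look the profile up by name, and create and save it only when it is missing, so entries the user has already edited are never overwritten. A compressed sketch of that idea is below; the dictionary-backed _store and the get_pattern/save helpers are stand-ins invented for the sketch, not the real eos.db API, even though the loop mirrors the getDamagePattern/save calls shown above.

# Illustrative sketch (not part of the diff): the get-or-create import loop,
# with a dictionary standing in for the saveddata database.
DEFAULT_PROFILES = [
    ("Uniform", 25, 25, 25, 25),
    ("[Generic]EM", 100, 0, 0, 0),
]

_store = {}  # name -> DamagePatternStub, simulating the saveddata database


class DamagePatternStub:
    def __init__(self, em, therm, kin, exp):
        self.emAmount, self.thermalAmount = em, therm
        self.kineticAmount, self.explosiveAmount = kin, exp
        self.name = None


def get_pattern(name):
    return _store.get(name)


def save(pattern):
    _store[pattern.name] = pattern


def import_damage_profile_defaults():
    for name, em, therm, kin, exp in DEFAULT_PROFILES:
        pattern = get_pattern(name)
        if pattern is None:                      # only create rows that are missing,
            pattern = DamagePatternStub(em, therm, kin, exp)
            pattern.name = name                  # so user-modified entries survive
            save(pattern)


import_damage_profile_defaults()
print(sorted(_store))  # -> ['Uniform', '[Generic]EM']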
@@ -17,22 +17,18 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, CheckConstraint, Boolean, DateTime
from sqlalchemy import Table, Column, Integer, ForeignKey, CheckConstraint, Boolean, DateTime
from sqlalchemy.orm import relation, mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
import datetime

from eos.db import saveddata_meta
from eos.saveddata.module import Module
from eos.saveddata.fit import Fit
from eos.saveddata.mutator import MutatorModule

modules_table = Table("modules", saveddata_meta,
                      Column("ID", Integer, primary_key=True),
                      Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
                      Column("itemID", Integer, nullable=True),
                      Column("baseItemID", Integer, nullable=True),
                      Column("mutaplasmidID", Integer, nullable=True),
                      Column("dummySlot", Integer, nullable=True, default=None),
                      Column("chargeID", Integer),
                      Column("state", Integer, CheckConstraint("state >= -1"), CheckConstraint("state <= 2")),
@@ -40,17 +36,7 @@ modules_table = Table("modules", saveddata_meta,
                      Column("position", Integer),
                      Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                      Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
                      Column("spoolType", Integer, nullable=True),
                      Column("spoolAmount", Float, nullable=True),
                      Column("projectionRange", Float, nullable=True),
                      CheckConstraint('("dummySlot" = NULL OR "itemID" = NULL) AND "dummySlot" != "itemID"'))


mapper(Module, modules_table,
       properties={
           "owner": relation(Fit),
           "mutators": relation(
               MutatorModule,
               backref="item",
               cascade="all,delete-orphan",
               collection_class=attribute_mapped_collection('attrID'))})
       properties={"owner": relation(Fit)})
@@ -1,36 +0,0 @@
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import datetime

from sqlalchemy import Column, DateTime, Float, ForeignKey, Integer, Table
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.saveddata.mutator import MutatorDrone

mutatorDrones_table = Table(
    "mutatorsDrones", saveddata_meta,
    Column("groupID", Integer, ForeignKey("drones.groupID"), primary_key=True, index=True),
    Column("attrID", Integer, primary_key=True, index=True),
    Column("value", Float, nullable=False),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))

mapper(MutatorDrone, mutatorDrones_table)
@@ -1,36 +0,0 @@
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import datetime

from sqlalchemy import Column, DateTime, Float, ForeignKey, Integer, Table
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.saveddata.mutator import MutatorModule

mutatorMods_table = Table(
    "mutators", saveddata_meta,
    Column("moduleID", Integer, ForeignKey("modules.ID"), primary_key=True, index=True),
    Column("attrID", Integer, primary_key=True, index=True),
    Column("value", Float, nullable=False),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))

mapper(MutatorModule, mutatorMods_table)
@@ -17,19 +17,18 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================


from sqlalchemy import Table, Column, Float, Integer
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.saveddata.price import Price


prices_table = Table("prices", saveddata_meta,
                     Column("typeID", Integer, primary_key=True),
                     Column("price", Float, default=0.0),
                     Column("time", Integer, nullable=False),
                     Column("status", Integer, nullable=False))
                     Column("failed", Integer))


mapper(Price, prices_table)
mapper(Price, prices_table, properties={
    "_Price__price": prices_table.c.price,
})
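One of the two revisions above maps the price column onto "_Price__price", that is, onto the name-mangled private __price attribute of the Price class, so the class itself can expose price through a property of its own. A small sketch of that idiom follows; the simplified column set, the PriceSketch class, the clamping rule in the setter, and the typeID value are all made up for the example, and SQLAlchemy earlier than 2.0 (classical mapper()) is assumed.

# Illustrative sketch (not part of the diff): mapping a column onto a
# name-mangled private attribute so a property can wrap it.
from sqlalchemy import Column, Float, Integer, MetaData, Table, create_engine
from sqlalchemy.orm import mapper, sessionmaker

meta = MetaData()
prices_sketch_table = Table(
    "prices", meta,
    Column("typeID", Integer, primary_key=True),
    Column("price", Float, default=0.0))


class PriceSketch:
    def __init__(self, typeID):
        self.typeID = typeID
        self.__price = None

    @property
    def price(self):
        return self.__price or 0.0

    @price.setter
    def price(self, value):
        # Hypothetical guard; the real class can add staleness/failure logic here.
        self.__price = max(0.0, float(value))


# "_PriceSketch__price" is how Python name-mangles PriceSketch.__price, so the
# column is written through the property-backed private attribute.
mapper(PriceSketch, prices_sketch_table,
       properties={"_PriceSketch__price": prices_sketch_table.c.price})

engine = create_engine("sqlite://")
meta.create_all(engine)
session = sessionmaker(bind=engine)()

p = PriceSketch(34)
p.price = -5          # clamped to 0.0 by the setter
session.add(p)
session.commit()
print(session.query(PriceSketch).one().price)  # -> 0.0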
@@ -21,19 +21,18 @@ import sys

from sqlalchemy.sql import and_
from sqlalchemy import desc, select
from sqlalchemy import func

from eos.db import saveddata_session, sd_lock
from eos.db.saveddata.fit import fits_table, projectedFits_table
from eos.db.saveddata.fit import projectedFits_table
from eos.db.util import processEager, processWhere
from eos.saveddata.price import Price
from eos.saveddata.user import User
from eos.saveddata.ssocharacter import SsoCharacter
from eos.saveddata.crestchar import CrestChar
from eos.saveddata.damagePattern import DamagePattern
from eos.saveddata.targetProfile import TargetProfile
from eos.saveddata.targetResists import TargetResists
from eos.saveddata.character import Character
from eos.saveddata.implantSet import ImplantSet
from eos.saveddata.fit import Fit, FitLite
from eos.saveddata.fit import Fit
from eos.saveddata.module import Module
from eos.saveddata.miscData import MiscData
from eos.saveddata.override import Override
@@ -110,9 +109,9 @@ if configVal is True:
    if type not in queryCache:
        return
    functionCache = queryCache[type]
    for _, localCache in functionCache.items():
    for _, localCache in functionCache.iteritems():
        toDelete = set()
        for cacheKey, info in localCache.items():
        for cacheKey, info in localCache.iteritems():
            IDs = info[1]
            if ID in IDs:
                toDelete.add(cacheKey)
@@ -157,7 +156,7 @@ def getUser(lookfor, eager=None):
        eager = processEager(eager)
        with sd_lock:
            user = saveddata_session.query(User).options(*eager).filter(User.ID == lookfor).first()
    elif isinstance(lookfor, str):
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            user = saveddata_session.query(User).options(*eager).filter(User.username == lookfor).first()
@@ -176,7 +175,7 @@ def getCharacter(lookfor, eager=None):
        eager = processEager(eager)
        with sd_lock:
            character = saveddata_session.query(Character).options(*eager).filter(Character.ID == lookfor).first()
    elif isinstance(lookfor, str):
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            character = saveddata_session.query(Character).options(*eager).filter(
@@ -284,12 +283,6 @@ def countAllFits():
    return count


def countFitGroupedByShip():
    with sd_lock:
        count = eos.db.saveddata_session.query(Fit.shipID, func.count(Fit.shipID)).group_by(Fit.shipID).all()
    return count


def countFitsWithShip(lookfor, ownerID=None, where=None, eager=None):
    """
    Get all the fits using a certain ship.
@@ -326,17 +319,6 @@ def getFitList(eager=None):
    return fits


def getFitListLite():
    with sd_lock:
        stmt = select([fits_table.c.ID, fits_table.c.name, fits_table.c.shipID])
        data = eos.db.saveddata_session.execute(stmt).fetchall()
        fits = []
        for fitID, fitName, shipID in data:
            fit = FitLite(id=fitID, name=fitName, shipID=shipID)
            fits.append(fit)
        return fits


@cachedQuery(Price, 1, "typeID")
def getPrice(typeID):
    if isinstance(typeID, int):
@@ -355,7 +337,7 @@ def clearPrices():


def getMiscData(field):
    if isinstance(field, str):
    if isinstance(field, basestring):
        with sd_lock:
            data = saveddata_session.query(MiscData).get(field)
    else:
@@ -377,16 +359,16 @@ def clearDamagePatterns():
    return deleted_rows


def getTargetProfileList(eager=None):
def getTargetResistsList(eager=None):
    eager = processEager(eager)
    with sd_lock:
        patterns = saveddata_session.query(TargetProfile).options(*eager).all()
        patterns = saveddata_session.query(TargetResists).options(*eager).all()
    return patterns


def clearTargetProfiles():
def clearTargetResists():
    with sd_lock:
        deleted_rows = saveddata_session.query(TargetProfile).delete()
        deleted_rows = saveddata_session.query(TargetResists).delete()
        commit()
    return deleted_rows

@@ -409,32 +391,32 @@ def getDamagePattern(lookfor, eager=None):
        with sd_lock:
            pattern = saveddata_session.query(DamagePattern).options(*eager).filter(
                DamagePattern.ID == lookfor).first()
    elif isinstance(lookfor, str):
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            pattern = saveddata_session.query(DamagePattern).options(*eager).filter(
                DamagePattern.rawName == lookfor).first()
                DamagePattern.name == lookfor).first()
    else:
        raise TypeError("Need integer or string as argument")
    return pattern


@cachedQuery(TargetProfile, 1, "lookfor")
def getTargetProfile(lookfor, eager=None):
@cachedQuery(TargetResists, 1, "lookfor")
def getTargetResists(lookfor, eager=None):
    if isinstance(lookfor, int):
        if eager is None:
            with sd_lock:
                pattern = saveddata_session.query(TargetProfile).get(lookfor)
                pattern = saveddata_session.query(TargetResists).get(lookfor)
        else:
            eager = processEager(eager)
            with sd_lock:
                pattern = saveddata_session.query(TargetProfile).options(*eager).filter(
                    TargetProfile.ID == lookfor).first()
    elif isinstance(lookfor, str):
                pattern = saveddata_session.query(TargetResists).options(*eager).filter(
                    TargetResists.ID == lookfor).first()
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            pattern = saveddata_session.query(TargetProfile).options(*eager).filter(
                TargetProfile.rawName == lookfor).first()
            pattern = saveddata_session.query(TargetResists).options(*eager).filter(
                TargetResists.name == lookfor).first()
    else:
        raise TypeError("Need integer or string as argument")
    return pattern
@@ -450,27 +432,27 @@ def getImplantSet(lookfor, eager=None):
        eager = processEager(eager)
        with sd_lock:
            pattern = saveddata_session.query(ImplantSet).options(*eager).filter(
                TargetProfile.ID == lookfor).first()
    elif isinstance(lookfor, str):
                TargetResists.ID == lookfor).first()
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            pattern = saveddata_session.query(ImplantSet).options(*eager).filter(TargetProfile.name == lookfor).first()
            pattern = saveddata_session.query(ImplantSet).options(*eager).filter(TargetResists.name == lookfor).first()
    else:
        raise TypeError("Improper argument")
    return pattern


def searchFits(nameLike, where=None, eager=None):
    if not isinstance(nameLike, str):
    if not isinstance(nameLike, basestring):
        raise TypeError("Need string as argument")
    # Prepare our string for request
    nameLike = "%{0}%".format(sqlizeString(nameLike))
    nameLike = u"%{0}%".format(sqlizeString(nameLike))

    # Add any extra components to the search to our where clause
    filter = processWhere(Fit.name.like(nameLike, escape="\\"), where)
    eager = processEager(eager)
    with sd_lock:
        fits = removeInvalid(saveddata_session.query(Fit).options(*eager).filter(filter).limit(100).all())
        fits = removeInvalid(saveddata_session.query(Fit).options(*eager).filter(filter).all())

    return fits

@@ -485,28 +467,29 @@ def getProjectedFits(fitID):
    raise TypeError("Need integer as argument")


def getSsoCharacters(clientHash, eager=None):
def getCrestCharacters(eager=None):
    eager = processEager(eager)
    with sd_lock:
        characters = saveddata_session.query(SsoCharacter).filter(SsoCharacter.client == clientHash).options(*eager).all()
        characters = saveddata_session.query(CrestChar).options(*eager).all()
    return characters


@cachedQuery(SsoCharacter, 1, "lookfor", "clientHash")
def getSsoCharacter(lookfor, clientHash, eager=None):
    filter = SsoCharacter.client == clientHash

@cachedQuery(CrestChar, 1, "lookfor")
def getCrestCharacter(lookfor, eager=None):
    if isinstance(lookfor, int):
        filter = and_(filter, SsoCharacter.ID == lookfor)
    elif isinstance(lookfor, str):
        filter = and_(filter, SsoCharacter.characterName == lookfor)
        if eager is None:
            with sd_lock:
                character = saveddata_session.query(CrestChar).get(lookfor)
        else:
            eager = processEager(eager)
            with sd_lock:
                character = saveddata_session.query(CrestChar).options(*eager).filter(CrestChar.ID == lookfor).first()
    elif isinstance(lookfor, basestring):
        eager = processEager(eager)
        with sd_lock:
            character = saveddata_session.query(CrestChar).options(*eager).filter(CrestChar.name == lookfor).first()
    else:
        raise TypeError("Need integer or string as argument")

    eager = processEager(eager)
    with sd_lock:
        character = saveddata_session.query(SsoCharacter).options(*eager).filter(filter).first()

    return character


@@ -532,8 +515,8 @@ def removeInvalid(fits):
    invalids = [f for f in fits if f.isInvalid]

    if invalids:
        list(map(fits.remove, invalids))
        list(map(saveddata_session.delete, invalids))
        map(fits.remove, invalids)
        map(saveddata_session.delete, invalids)
        saveddata_session.commit()

    return fits
@@ -560,21 +543,8 @@ def commit():
    with sd_lock:
        try:
            saveddata_session.commit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            saveddata_session.rollback()
            exc_info = sys.exc_info()
            raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])


def flush():
    with sd_lock:
        try:
            saveddata_session.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            saveddata_session.rollback()
            exc_info = sys.exc_info()
            raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
            raise exc_info[0], exc_info[1], exc_info[2]
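Much of this file's diff is mechanical Python 3 versus Python 2 translation rather than behavioural change: dict iteration (items vs iteritems), string type checks (str vs basestring), u""-prefixed literals, eagerly consuming map() with list(), and re-raising an exception with its original traceback. A small runnable sketch of the Python 3 spellings is below; the variable names are invented for the example, and the re-raise shown is the common with_traceback idiom, a slight simplification of the diff's exc_info[0](exc_info[1]).with_traceback(exc_info[2]) form.

# Illustrative sketch (not part of the diff): the Python 3 idioms this hunk
# keeps flipping between; the Python 2 spellings appear in the comments.
import sys

queryCache = {"fit": 1, "price": 2}

# Py2: for key, value in queryCache.iteritems():
for key, value in queryCache.items():
    pass

# Py2: isinstance(lookfor, basestring)
lookfor = "Uniform"
isString = isinstance(lookfor, str)


def reraiseLastError():
    """Re-raise the active exception with its original traceback."""
    excType, excValue, excTb = sys.exc_info()
    # Py2: raise excType, excValue, excTb
    raise excValue.with_traceback(excTb)


try:
    try:
        1 / 0
    except Exception:
        reraiseLastError()
except ZeroDivisionError as err:
    print(type(err).__name__, isString)  # -> ZeroDivisionError True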
@@ -1,51 +0,0 @@
# ===============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.saveddata.targetProfile import TargetProfile


targetProfiles_table = Table(
    'targetResists',
    saveddata_meta,
    Column('ID', Integer, primary_key=True),
    Column('name', String),
    Column('emAmount', Float),
    Column('thermalAmount', Float),
    Column('kineticAmount', Float),
    Column('explosiveAmount', Float),
    Column('maxVelocity', Float, nullable=True),
    Column('signatureRadius', Float, nullable=True),
    Column('radius', Float, nullable=True),
    Column('ownerID', ForeignKey('users.ID'), nullable=True),
    Column('created', DateTime, nullable=True, default=datetime.datetime.now),
    Column('modified', DateTime, nullable=True, onupdate=datetime.datetime.now))

mapper(
    TargetProfile,
    targetProfiles_table,
    properties={
        'rawName': targetProfiles_table.c.name,
        '_maxVelocity': targetProfiles_table.c.maxVelocity,
        '_signatureRadius': targetProfiles_table.c.signatureRadius,
        '_radius': targetProfiles_table.c.radius})
eos/db/saveddata/targetResists.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# ===============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.saveddata.targetResists import TargetResists

targetResists_table = Table("targetResists", saveddata_meta,
                            Column("ID", Integer, primary_key=True),
                            Column("name", String),
                            Column("emAmount", Float),
                            Column("thermalAmount", Float),
                            Column("kineticAmount", Float),
                            Column("explosiveAmount", Float),
                            Column("ownerID", ForeignKey("users.ID"), nullable=True),
                            Column("created", DateTime, nullable=True, default=datetime.datetime.now),
                            Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
                            )

mapper(TargetResists, targetResists_table)
@@ -39,7 +39,7 @@ def processEager(eager):
        return tuple()
    else:
        l = []
        if isinstance(eager, str):
        if isinstance(eager, basestring):
            eager = (eager,)

        for e in eager:
@@ -50,7 +50,7 @@ def processEager(eager):

def _replacements(eagerString):
    splitEager = eagerString.split(".")
    for i in range(len(splitEager)):
    for i in xrange(len(splitEager)):
        part = splitEager[i]
        replacement = replace.get(part)
        if replacement:
@@ -17,11 +17,7 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================


from logbook import Logger
from sqlalchemy.orm.attributes import flag_dirty
from sqlalchemy.orm.collections import collection


pyfalog = Logger(__name__)

@@ -109,20 +105,17 @@ class HandledList(list):

    def remove(self, thing):
        # We must flag it as modified, otherwise it not be removed from the database
        flag_dirty(thing)
        # @todo: flag_modified isn't in os x skel. need to rebuild to include
        # flag_modified(thing, "itemID")
        if thing.isInvalid:  # see GH issue #324
            thing.itemID = 0
        list.remove(self, thing)

    def sort(self, *args, **kwargs):
        # We need it here to prevent external users from accidentally sorting the list as alot of
        # external logic relies on keeping modules at their places
        raise NotImplementedError


class HandledModuleList(HandledList):

    def append(self, mod):
        emptyPosition = float("Inf")
        for i in range(len(self)):
        for i in xrange(len(self)):
            currMod = self[i]
            if currMod.isEmpty and not mod.isEmpty and currMod.slot == mod.slot:
                currPos = mod.position or i
@@ -130,90 +123,55 @@ class HandledModuleList(HandledList):
                    emptyPosition = currPos

        if emptyPosition < len(self):
            del self[emptyPosition]
            mod.position = emptyPosition
            self.__toModule(emptyPosition, mod)
            HandledList.insert(self, emptyPosition, mod)
            if mod.isInvalid:
                self.__toDummy(mod.position)
        else:
            self.appendIgnoreEmpty(mod)
                self.remove(mod)
            return

    @collection.appender
    def appendIgnoreEmpty(self, mod):
        mod.position = len(self)
        super().append(mod)
        HandledList.append(self, mod)
        if mod.isInvalid:
            self.remove(mod)

    def replace(self, idx, mod):
        try:
            oldMod = self[idx]
        except IndexError:
            return
        self.__toModule(idx, mod)
        if mod.isInvalid:
            self.__toModule(idx, oldMod)

    def replaceRackPosition(self, rackPosition, mod):
        listPositions = []
        for currPos in range(len(self)):
            currMod = self[currPos]
            if currMod.slot == mod.slot:
                listPositions.append(currPos)
        listPositions.sort()
        try:
            modListPosition = listPositions[rackPosition]
        except IndexError:
            self.appendIgnoreEmpty(mod)
        else:
            oldMod = self[modListPosition]
            if mod.isEmpty:
                self.__toDummy(modListPosition)
            else:
                self.__toModule(modListPosition, mod)
                # If new module cannot be appended, restore old state
                if mod.isInvalid:
                    if oldMod.isEmpty:
                        self.__toDummy(modListPosition)
                    else:
                        self.__toModule(modListPosition, oldMod)

    def insert(self, idx, mod):
        mod.position = idx
        i = idx
    def insert(self, index, mod):
        mod.position = index
        i = index
        while i < len(self):
            self[i].position += 1
            i += 1
        HandledList.insert(self, idx, mod)
        if mod.isInvalid:
            self.remove(mod)
        HandledList.insert(self, index, mod)

    def remove(self, mod):
        HandledList.remove(self, mod)
        oldPos = mod.position

        mod.position = None
        for i in range(oldPos, len(self)):
        for i in xrange(oldPos, len(self)):
            self[i].position -= 1

    def free(self, idx):
        self.__toDummy(idx)

    def __toDummy(self, index):
    def toDummy(self, index):
        mod = self[index]
        if not mod.isEmpty:
            dummy = mod.buildEmpty(mod.slot)
            dummy.position = index
            self[index] = dummy
            mod.position = None

    def __toModule(self, index, mod):
        oldMod = self[index]
    def toModule(self, index, mod):
        mod.position = index
        self[index] = mod
        oldMod.position = None

    def freeSlot(self, slot):
        for i in range(len(self)):
            mod = self[i]
            if mod.getModifiedItemAttr("subSystemSlot") == slot:
                self.toDummy(i)
                break


class HandledDroneCargoList(HandledList):

    def find(self, item):
        for o in self:
            if o.item == item:
@@ -225,110 +183,29 @@ class HandledDroneCargoList(HandledList):

    def append(self, thing):
        HandledList.append(self, thing)
        if thing.isInvalid:
            self.remove(thing)

    def insert(self, idx, thing):
        HandledList.insert(self, idx, thing)
        if thing.isInvalid:
            self.remove(thing)


class HandledImplantList(HandledList):
class HandledImplantBoosterList(HandledList):
    def append(self, thing):
        if thing.isInvalid:
            HandledList.append(self, thing)
            self.remove(thing)
            return

    def append(self, implant):
        if implant.isInvalid:
            HandledList.append(self, implant)
            self.remove(implant)
            return
        if self.__slotCheck(implant):
            HandledList.append(self, implant)
            self.remove(implant)
            return
        HandledList.append(self, implant)

    def insert(self, idx, implant):
        if implant.isInvalid:
            HandledList.insert(self, idx, implant)
            self.remove(implant)
            return
        if self.__slotCheck(implant):
            HandledList.insert(self, idx, implant)
            self.remove(implant)
            return
        HandledList.insert(self, idx, implant)

    def makeRoom(self, implant):
        # if needed, remove booster that was occupying slot
        oldObj = next((i for i in self if i.slot == implant.slot), None)
        if oldObj is not None:
            pyfalog.info("Slot {0} occupied with {1}, replacing with {2}", implant.slot, oldObj.item.name, implant.item.name)
            position = self.index(oldObj)
            from gui.fitCommands.helpers import ImplantInfo
            implantInfo = ImplantInfo.fromImplant(oldObj)
        oldObj = next((m for m in self if m.slot == thing.slot), None)
        if oldObj:
            pyfalog.info("Slot {0} occupied with {1}, replacing with {2}", thing.slot, oldObj.item.name, thing.item.name)
            oldObj.itemID = 0  # hack to remove from DB. See GH issue #324
            self.remove(oldObj)
            return position, implantInfo
        return None, None

    def __slotCheck(self, implant):
        return any(i.slot == implant.slot for i in self)


class HandledBoosterList(HandledList):

    def append(self, booster):
        if booster.isInvalid:
            HandledList.append(self, booster)
            self.remove(booster)
            return
        if self.__slotCheck(booster):
            HandledList.append(self, booster)
            self.remove(booster)
            return
        HandledList.append(self, booster)

    def insert(self, idx, booster):
        if booster.isInvalid:
            HandledList.insert(self, idx, booster)
            self.remove(booster)
            return
        if self.__slotCheck(booster):
            HandledList.insert(self, idx, booster)
            self.remove(booster)
            return
        HandledList.insert(self, idx, booster)

    def makeRoom(self, booster):
        # if needed, remove booster that was occupying slot
        oldObj = next((b for b in self if b.slot == booster.slot), None)
        if oldObj is not None:
            pyfalog.info("Slot {0} occupied with {1}, replacing with {2}", booster.slot, oldObj.item.name, booster.item.name)
            position = self.index(oldObj)
            from gui.fitCommands.helpers import BoosterInfo
            boosterInfo = BoosterInfo.fromBooster(oldObj)
            oldObj.itemID = 0  # hack to remove from DB. See GH issue #324
            self.remove(oldObj)
            return position, boosterInfo
        return None, None

    def __slotCheck(self, booster):
        return any(b.slot == booster.slot for b in self)


class HandledSsoCharacterList(list):

    def append(self, character):
        old = next((x for x in self if x.client == character.client), None)
        if old is not None:
            pyfalog.warning("Removing SSO Character with same hash: {}".format(repr(old)))
            list.remove(self, old)

        list.append(self, character)
        HandledList.append(self, thing)


class HandledProjectedModList(HandledList):

    def append(self, proj):
        if proj.isInvalid:
            # we must include it before we remove it. doing it this way ensures
@@ -336,65 +213,36 @@ class HandledProjectedModList(HandledList):
            HandledList.append(self, proj)
            self.remove(proj)
            return

        proj.projected = True
        HandledList.append(self, proj)
        # Remove non-projectable modules
        if not proj.item.isType("projected") and not proj.isExclusiveSystemEffect:
            self.remove(proj)
        isSystemEffect = proj.item.group.name == "Effect Beacon"

    def insert(self, idx, proj):
        if proj.isInvalid:
            # we must include it before we remove it. doing it this way ensures
            # rows and relationships in database are removed as well
            HandledList.insert(self, idx, proj)
            self.remove(proj)
            return
        proj.projected = True
        HandledList.insert(self, idx, proj)
        # Remove non-projectable modules
        if not proj.item.isType("projected") and not proj.isExclusiveSystemEffect:
            self.remove(proj)

    @property
    def currentSystemEffect(self):
        return next((m for m in self if m.isExclusiveSystemEffect), None)

    def makeRoom(self, proj):
        if proj.isExclusiveSystemEffect:
        if isSystemEffect:
            # remove other system effects - only 1 per fit plz
            mod = self.currentSystemEffect
            oldEffect = next((m for m in self if m.item.group.name == "Effect Beacon"), None)

            if mod:
                pyfalog.info("System effect occupied with {0}, removing it to make space for {1}".format(mod.item.name, proj.item.name))
                position = self.index(mod)
                # We need to pack up this info, so whatever...
                from gui.fitCommands.helpers import ModuleInfo
                modInfo = ModuleInfo.fromModule(mod)
                self.remove(mod)
                return position, modInfo
        return None, None
            if oldEffect:
                pyfalog.info("System effect occupied with {0}, replacing with {1}", oldEffect.item.name, proj.item.name)
                self.remove(oldEffect)

        HandledList.append(self, proj)

        # Remove non-projectable modules
        if not proj.item.isType("projected") and not isSystemEffect:
            self.remove(proj)


class HandledProjectedDroneList(HandledDroneCargoList):

    def append(self, proj):
        proj.projected = True
        HandledList.append(self, proj)

        # Remove invalid or non-projectable drones
        if proj.isInvalid or not proj.item.isType("projected"):
            self.remove(proj)
            proj.projected = False

    def insert(self, idx, proj):
        proj.projected = True
        HandledList.insert(self, idx, proj)
        # Remove invalid or non-projectable drones
        if proj.isInvalid or not proj.item.isType("projected"):
            self.remove(proj)
            proj.projected = False


class HandledItem:
class HandledItem(object):
    def preAssignItemAttr(self, *args, **kwargs):
        self.itemModifiedAttributes.preAssign(*args, **kwargs)

@@ -411,7 +259,7 @@ class HandledItem:
        self.itemModifiedAttributes.force(*args, **kwargs)


class HandledCharge:
class HandledCharge(object):
    def preAssignChargeAttr(self, *args, **kwargs):
        self.chargeModifiedAttributes.preAssign(*args, **kwargs)
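HandledList and its subclasses above are plain list subclasses that get handed to relation(..., collection_class=...), so SQLAlchemy instruments their append/remove (and the @collection.appender-decorated helper) and routes every collection change through them; that is why several of the methods append an item and then immediately remove it again when it is invalid, keeping the ORM's bookkeeping consistent. Below is a deliberately tiny sketch of that idea; the Fit/Module models, the isInvalid flag, and the collection class are all invented for the example, and SQLAlchemy 1.x with the declarative style is assumed.

# Illustrative sketch (not part of the diff): a validating list collection
# in the spirit of HandledList, with hypothetical Fit/Module models.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()


class ValidatingList(list):
    """List collection that silently drops invalid children."""

    def append(self, thing):
        # Append first, then remove: letting the item pass through the
        # collection mirrors the "include it before we remove it" comments
        # in the hunk above and keeps the ORM's change tracking consistent.
        list.append(self, thing)
        if getattr(thing, "isInvalid", False):
            self.remove(thing)


class Fit(Base):
    __tablename__ = "fits"
    ID = Column(Integer, primary_key=True)
    modules = relationship("Module", collection_class=ValidatingList)


class Module(Base):
    __tablename__ = "modules"
    ID = Column(Integer, primary_key=True)
    fitID = Column(Integer, ForeignKey("fits.ID"))
    name = Column(String)
    isInvalid = False  # plain class attribute, not a mapped column


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

fit = Fit()
good, bad = Module(name="good"), Module(name="bad")
bad.isInvalid = True
fit.modules.append(good)
fit.modules.append(bad)   # appended, then dropped by the collection
session.add(fit)
session.commit()
print([m.name for m in fit.modules])  # -> ['good']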
Some files were not shown because too many files have changed in this diff.