Compare commits
2 commits: v2.10.0 ... preview_po

| Author | SHA1 | Date |
|---|---|---|
|  | 9cc50076bd |  |
|  | f37f28d0fa |  |
.appveyor.yml (135 lines)
@@ -1,135 +0,0 @@
environment:
  global:
    # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
    # /E:ON and /V:ON options are not enabled in the batch script intepreter
    # See: http://stackoverflow.com/a/13751649/163740
    CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"

  matrix:

    - PYTHON: "C:\\Python36"
      PYTHON_VERSION: "3.6.x"
      PYTHON_ARCH: "32"
init:
  - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
install:
  # If there is a newer build queued for the same PR, cancel this one.
  # The AppVeyor 'rollout builds' option is supposed to serve the same
  # purpose but it is problematic because it tends to cancel builds pushed
  # directly to master instead of just PR builds (or the converse).
  # credits: JuliaLang developers.
  - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
        https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
        Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
        throw "There are newer queued builds for this pull request, failing early." }

  - ECHO "Filesystem root:"
  - ps: "ls \"C:/\""

  - ECHO "Filesystem projects root:"
  - ps: "ls \"C:\\projects\\\""

  - ECHO "Filesystem pyfa root:"
  - ps: "ls \"C:\\projects\\$env:APPVEYOR_PROJECT_SLUG\""

  - ECHO "Installed SDKs:"
  - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""

  # Prepend newly installed Python to the PATH of this build (this cannot be
  # done from inside the powershell script as it would require to restart
  # the parent CMD process).
  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"

  - "python --version"
  - "python -c \"import struct; print(struct.calcsize('P') * 8)\""

  # Upgrade to the latest version of pip to avoid it displaying warnings
  # about it being out of date.
  - "pip install --disable-pip-version-check --user --upgrade pip"

  # Install the build dependencies of the project. If some dependencies contain
  # compiled extensions and are not provided as pre-built wheel packages,
  # pip will build them from source using the MSVC compiler matching the
  # target Python version and architecture
  - ECHO "Install pip requirements:"
  - "pip install -r requirements.txt"
  - "pip install PyInstaller"

before_build:
  # directory that will contain the built files
  - ps: $env:PYFA_DIST_DIR = "c:\projects\$env:APPVEYOR_PROJECT_SLUG\dist"
  - ps: $env:PYFA_VERSION = (python ./scripts/dump_version.py)
  - ps: echo("pyfa version ")
  - ps: echo ($env:PYFA_VERSION)

build_script:
  - ECHO "Build pyfa:"

  ##########
  # PyInstaller - create binaries for pyfa
  ##########
  # Build command for PyInstaller
  - "python -m PyInstaller --noupx --clean --windowed --noconsole -y pyfa.spec"
  # Copy over manifest (See pyfa-org/pyfa#1622)
  - ps: xcopy /y dist_assets\win\pyfa.exe.manifest $env:PYFA_DIST_DIR\pyfa\
  # Not really sure if this is needed, but why not
  - ps: xcopy /y dist_assets\win\Microsoft.VC90.CRT.manifest $env:PYFA_DIST_DIR\pyfa\

  ##########
  # InnoScript EXE building
  # This is in a separate script because I don't feel like copying over the logic to AppVeyor script right now...
  ##########
  - "python dist_assets/win/dist.py"
  - ps: dir $env:PYFA_DIST_DIR/
  #- ECHO "Build pyfa (Debug):"
  #- copy C:\projects\pyfa\dist_assets\win\pyfa_debug.spec C:\projects\pyfa\pyfa_debug.spec
  #- "pyinstaller.exe --clean --noconfirm --windowed --upx-dir=C:\\projects\\pyfa\\scripts\\upx.exe C:\\projects\\pyfa\\pyfa_debug.spec"

build: on

after_build:
  - ps: "ls \"./\""
  #- ps: "ls \"C:\\projects\\pyfa\\build\\pyfa\\\""
  # - ps: "ls \"C:\\projects\\$env:APPVEYOR_PROJECT_SLUG\\build\\exe.win32-2.7\\\""
  # Zip
  # APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER
  #- 7z a build.zip -r C:\projects\pyfa\build\pyfa\*.*
  - ps: 7z a "pyfa-$env:PYFA_VERSION-win.zip" -r "$env:PYFA_DIST_DIR\pyfa\*.*"
  #- 7z a pyfa_debug.zip -r C:\projects\pyfa\dist\pyfa_debug\*.*

on_success:
  # Do nothing right now

test_script:
  #- tox
  #- "py.test --cov=./"
  # Run the project tests
  # - "%CMD_IN_ENV% python C:/projects/eve-gnosis/setup.py nosetests"

after_test:
  # If tests are successful, create binary packages for the project.
  # - "%CMD_IN_ENV% python setup.py bdist_wheel"
  # - "%CMD_IN_ENV% python setup.py bdist_wininst"
  # - "%CMD_IN_ENV% python setup.py bdist_msi"
  # - ps: "ls dist"

artifacts:
  # Archive the generated packages in the ci.appveyor.com build report.
  - path: pyfa*-win.zip
  - path: pyfa*-win.exe
  #- path: pyfa_debug.zip
  #  name: Pyfa_debug

deploy:
  tag: $(pyfa_version)
  release: pyfa $(pyfa_version)
  description: 'Release description'
  provider: GitHub
  auth_token:
    secure: BfNHO66ff5hVx2O2ORbl49X0U/5h2V2T0IuRZDwm7fd1HvsVluF0wRCbl29oRp1M
  draft: true
  on:
    APPVEYOR_REPO_TAG: true # deploy on tag push only
#on_success:
#  - TODO: upload the content of dist/*.whl to a public wheelhouse
#
.codecov.yml (26 lines)
@@ -1,26 +0,0 @@
codecov:
  notify:
    require_ci_to_pass: yes

coverage:
  precision: 2
  round: down
  range: "70...100"

  status:
    project: yes
    patch: yes
    changes: no

parsers:
  gcov:
    branch_detection:
      conditional: yes
      loop: yes
      method: no
      macro: no

comment:
  layout: "header, diff"
  behavior: default
  require_changes: no
.gitattributes (vendored, 40 lines)
@@ -1,40 +0,0 @@
# Set the default behavior, in case people don't have core.autocrlf set.
* text=auto

# Explicitly declare text files you want to always be normalized and converted
# to native line endings on checkout.
# *.c text
# *.h text

# Declare files that will always have CRLF line endings on checkout.
# Source files
# ============
*.pxd text eol=crlf
*.py text eol=crlf
*.py3 text eol=crlf
*.pyw text eol=crlf
*.pyx text eol=crlf
pyfa.py text eol=lf

# Denote all files that are truly binary and should not be modified.
# Binary files
# ============
*.db binary
*.p binary
*.pkl binary
*.pyc binary
*.pyd binary
*.pyo binary

# Note: .db, .p, and .pkl files are associated
# with the python modules ``pickle``, ``dbm.*``,
# ``shelve``, ``marshal``, ``anydbm``, & ``bsddb``
# (among others).

# Denote all files that are truly binary and should not be modified.
# Image files
# ============
*.png binary
*.jpg binary
*.icns binary
*.ico binary
.gitignore (vendored, 125 lines)
@@ -4,122 +4,19 @@
#Kwrite/Gedit/Other crapapps making backups
*~

#Patch files
*.patch

#Personal
/saveddata/

#Pyfa file
pyfaFits.html

#Temporary files
*.py__jb_tmp__

# Based on https://github.com/github/gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
Pyfa.egg-info/

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
.venv/
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

# Eclipse project settings
#Eclipse
.project
.pydevproject
.settings

# Pycharm project settings
.idea
eos.iml
gitversion
.version
/.version
*.swp
#Patch files
*.patch

*.fsdbinary
#Personal
saveddata/

#PyCharm
.idea/

#Pyfa file
pyfaFits.html

.mailmap (14 lines)
@@ -1,14 +0,0 @@
cncfanatics <diego.duclos@gmail.com> cncfanatics <cncfanatics@titanium.(none)>
blitzmann <holmes.ryan.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> blitzmann <ryan.xgamer99@gmail.com>
blitzmann <holmes.ryan.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> blitzman <ryan.xgamer99@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> Ryan Holmes <ryan.holmes.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com>
Corollax <corollax@gmail.com> Corollax <corollax@corollax-laptop.(none)>
Corollax <corollax@gmail.com> Corollax <corollax@corollax-N76VM.(none)>
Mr. Nukealizer <mr.nukealizer@gmail.com> Mr. Nukealizer <MrNukealizer@users.noreply.github.com>
DarkPhoenix <phoenix@mail.ru>
Sakari Orisi <sakari@evefit.org>
Will Wykeham <will@wykeham.net> Will Wykeham <will.wykeham@paconsulting.com>
OISumeko <camerongrout@gmail.com> OISumeko <cameron@sporadic.co.nz>
.travis.yml (31 lines)
@@ -1,31 +0,0 @@
os: linux
language: python
git:
  depth: 400
python:
  - 3.6
matrix:
  include:
    - os: osx
      osx_image: xcode7.3
      language: generic
      env: PYTHON=3.6.1
before_install:
  - bash scripts/setup-osx.sh
install:
  - export PYFA_VERSION="$(python3 scripts/dump_version.py)"
  - bash scripts/package-osx.sh
before_deploy:
  - export RELEASE_PKG_FILE=$(ls *.deb)
  - echo "deploying $RELEASE_PKG_FILE to GitHub releases"
deploy:
  provider: releases
  api_key:
    secure: Xfu0xApoB0zUPLXl29aYUulVC3iA4/3bXQwwADKCfAKZwxgNon4dLbO7Rie5/7Ukf2POL0KwmRaQGN3kOr+XSoIVTE4M5sXxnhiaaLGKQ+48hDizLE6JuXcZGJvkxUaghaTzIdCwHsG7VGBsPfQgfGsjJcfBp8tFNLmRyM/Jpsr8T6BR2MxtBIEUVy8zrOWFNZqnmWrY2pWMsB9fYt3JFNdpqeIgRAYqbBsBcZQ1MngLTi3ztuYS5IaF+lk06RrnBlHmUsJu/5nCvIpvPvD0i2BLZ3Uu0+Fn+8QWUgjJEL9MNseXZMXynu05xd8YRk7Ajc9CUrzQIIbAktyteYp85kE3pUJHmrMLcXhh7nqkwttR5/47Zwa3OLJLJFKBxMx6wY5jFkJjkV08850B7aWrmTFl/Eqc3Q5nZMuiEt3wFRbjxHi9h1mTN/fkxfRRHg8u3ENGPR+ZPiFC3J18qtks/B/hsKjjHvZP1i79OYlET4V/zyLyyQkCbpDaARQANuotLYJyZ7tH+KWEyRsvTi0M9Yev9mNNw6aI4vzh4HfkEhvcvnWnYwckPj1dnjQ573Qpw0Z9wsconoWfHAn+hBDt3+YLMrrFZl++mCRskHH1mZChX3aGMDi49zD0kfxBUkYPOAhguc6PwudBxHUZP+O6T/SoHylff6EizCE/k5dGeAk=
  file_glob: true
  file: "dist/pyfa-*.zip"
  skip_cleanup: true
  draft: true
  on:
    tags: true
    repo: pyfa-org/Pyfa
@@ -1,34 +0,0 @@
<!--

Submit a bug report bug report or feature request

Here you can inform pyfa developers of potential bugs or suggest features / improvements to the project. Please check
to make sure that the bug hasn't been reported or feature requested before submitting. If you have general questions
about the project and want to reach out to the developers personally, please check out out our [Slack]
(https://pyfainvite.azurewebsites.net/).

-->

## Bug Report


### Expected behavior:


### Actual behavior:


### Detailed steps to reproduce:


### Fits involved in EFT format (Edit > To Clipboard > EFT):


### Release or development git branch? Please note the release version or commit hash:


### Operating system and version (eg: Windows 10, OS X 10.9, OS X 10.11, Ubuntu 16.10):


### Other relevant information:

README.md (63 lines)
@@ -1,60 +1,15 @@
# pyfa
# Pyfa

[](https://pyfainvite.azurewebsites.net/) [](https://travis-ci.org/pyfa-org/Pyfa)
Pyfa is a cross-platform desktop fitting application for EVE online that can be used natively on any platform where python and wxwidgets are available.


It provides many advanced features such as graphs and full calculations of any possible combination of modules, fits, etc.

## What is it?
Please see the [FAQ](https://github.com/DarkFenX/Pyfa/wiki/FAQ) for answers to common questions / concerns

pyfa, short for **py**thon **f**itting **a**ssistant, allows you to create, experiment with, and save ship fittings without being in game. Open source and written in Python, it is available on any platform where Python 2.x and wxWidgets are available, including Windows, Mac OS X, and Linux.

## Latest Version and Changelogs
The latest version along with release notes can always be found on the project's [Releases](https://github.com/DarkFenX/Pyfa/releases) page. pyfa will notify you if you are running an outdated version.

## Installation
Windows and OS X users are supplied self-contained builds of pyfa on the [latest releases](https://github.com/pyfa-org/Pyfa/releases/latest) page. An `.exe` installer is also available for Windows builds. Linux users can run pyfa using their distribution's Python interpreter. There is no official self-contained package for Linux, however, there are a number of third-party packages available through distribution-specific repositories.

#### OS X

Apart from the official release, there is also a [Homebrew](http://brew.sh) option for installing pyfa on OS X. Please note this is maintained by a third-party and is not tested by pyfa developers. Simply fire up in terminal:
```
$ brew install Caskroom/cask/pyfa
```

### Linux Distro-specific Packages
The following is a list of pyfa packages available for certain distributions. Please note that these packages are maintained by third-parties and are not evaluated by the pyfa developers.

* Arch: https://aur.archlinux.org/packages/pyfa/
* Gentoo: https://github.com/ZeroPointEnergy/gentoo-pyfa-overlay

### Dependencies
If you wish to help with development or simply need to run pyfa through a Python interpreter, the following software is required:

* Python 3.6
* Requirements as listed in `requirements.txt`

## Bug Reporting
The preferred method of reporting bugs is through the project's [GitHub Issues interface](https://github.com/pyfa-org/Pyfa/issues). Alternatively, posting a report in the [pyfa thread](http://forums.eveonline.com/default.aspx?g=posts&t=247609) on the official EVE Online forums is acceptable. Guidelines for bug reporting can be found on [this wiki page](https://github.com/DarkFenX/Pyfa/wiki/Bug-Reporting).

## License
pyfa is licensed under the GNU GPL v3.0, see LICENSE

## Resources
* Development repository: [https://github.com/pyfa-org/Pyfa](https://github.com/pyfa-org/Pyfa)
* [EVE forum thread](https://forums.eveonline.com/t/27156)
* [EVE University guide using pyfa](http://wiki.eveuniversity.org/Guide_to_using_PYFA)
#### Links
* [Development repository: http://github.com/DarkFenX/Pyfa](http://github.com/DarkFenX/Pyfa)
* [XMPP conference: pyfa@conference.jabber.org](pyfa@conference.jabber.org)
* [EVE forum thread](http://forums.eveonline.com/default.aspx?g=posts&t=247609)
* [EVE Online website](http://www.eveonline.com/)

## Contacts:
* Sable Blitzmann
    * GitHub: @blitzmann
    * [TweetFleet Slack](https://www.fuzzwork.co.uk/tweetfleet-slack-invites/): @blitzmann
    * [Gitter chat](https://gitter.im/pyfa-org/Pyfa): @ blitzmann
    * Email: sable.blitzmann@gmail.com
* Kadesh / DarkPhoenix
    * GitHub: @DarkFenX
    * EVE: Kadesh Priestess
    * Email: phoenix@mail.ru

## CCP Copyright Notice
EVE Online, the EVE logo, EVE and all associated logos and designs are the intellectual property of CCP hf. All artwork, screenshots, characters, vehicles, storylines, world facts or other recognizable features of the intellectual property relating to these trademarks are likewise the intellectual property of CCP hf. EVE Online and the EVE logo are the registered trademarks of CCP hf. All rights are reserved worldwide. All other trademarks are the property of their respective owners. CCP hf. has granted permission to pyfa to use EVE Online and all associated logos and designs for promotional and information purposes on its website but does not endorse, and is not in any way affiliated with, pyfa. CCP is in no way responsible for the content on or functioning of this program, nor can it be liable for any damage arising from the use of this program.

@@ -1,13 +0,0 @@
<code_scheme name="Pyfa">
  <option name="LINE_SEPARATOR" value="
" />
  <option name="RIGHT_MARGIN" value="165" />
  <Python>
    <option name="NEW_LINE_AFTER_COLON" value="true" />
    <option name="DICT_ALIGNMENT" value="2" />
    <option name="DICT_NEW_LINE_AFTER_LEFT_BRACE" value="true" />
    <option name="DICT_NEW_LINE_BEFORE_RIGHT_BRACE" value="true" />
    <option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true" />
    <option name="OPTIMIZE_IMPORTS_SORT_NAMES_IN_FROM_IMPORTS" value="true" />
    <option name="OPTIMIZE_IMPORTS_JOIN_FROM_IMPORTS_WITH_SAME_SOURCE" value="true" />
  </Python>
</code_scheme>
@@ -1,54 +0,0 @@
<profile version="1.0">
  <option name="myName" value="Pyfa" />
  <inspection_tool class="IgnoreUnusedEntry" enabled="false" level="UNUSED ENTRY" enabled_by_default="false" />
  <inspection_tool class="InconsistentLineSeparators" enabled="true" level="ERROR" enabled_by_default="true" />
  <inspection_tool class="ProblematicWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyBehaveInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyClassicStyleClassInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
    <option name="ourVersions">
      <value>
        <list size="1">
          <item index="0" class="java.lang.String" itemvalue="2.7" />
        </list>
      </value>
    </option>
  </inspection_tool>
  <inspection_tool class="PyMissingTypeHintsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
  <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
    <option name="ignoredPackages">
      <value>
        <list size="1">
          <item index="0" class="java.lang.String" itemvalue="wxPython" />
        </list>
      </value>
    </option>
  </inspection_tool>
  <inspection_tool class="PyPep8Inspection" enabled="true" level="TYPO" enabled_by_default="true">
    <option name="ignoredErrors">
      <list>
        <option value="E203" />
        <option value="E127" />
        <option value="E128" />
        <option value="E126" />
      </list>
    </option>
  </inspection_tool>
  <inspection_tool class="PyPep8NamingInspection" enabled="true" level="TYPO" enabled_by_default="true">
    <option name="ignoredErrors">
      <list>
        <option value="N802" />
        <option value="N806" />
        <option value="N803" />
        <option value="N814" />
      </list>
    </option>
  </inspection_tool>
  <inspection_tool class="PyShadowingBuiltinsInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyShadowingNamesInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
    <option name="processCode" value="true" />
    <option name="processLiterals" value="true" />
    <option name="processComments" value="true" />
  </inspection_tool>
</profile>
@@ -1,146 +0,0 @@
# noinspection PyPackageRequirements
import pytest

import os
import sys
import threading

from sqlalchemy import MetaData, create_engine
from sqlalchemy.orm import sessionmaker

script_dir = os.path.dirname(os.path.abspath(__file__))
# Add root folder to python paths
sys.path.append(os.path.realpath(os.path.join(script_dir, '..', '..')))
sys._called_from_test = True

# noinspection PyUnresolvedReferences,PyUnusedLocal
@pytest.fixture
def DBInMemory_test():
    def rollback():
        with sd_lock:
            saveddata_session.rollback()


    print("Creating database in memory")
    from os.path import realpath, join, dirname, abspath

    debug = False
    gamedataCache = True
    saveddataCache = True
    gamedata_version = ""
    gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(str(__file__))), "..", "eve.db"))
    saveddata_connectionstring = 'sqlite:///:memory:'

    class ReadOnlyException(Exception):
        pass

    if callable(gamedata_connectionstring):
        gamedata_engine = create_engine("sqlite://", creator=gamedata_connectionstring, echo=debug)
    else:
        gamedata_engine = create_engine(gamedata_connectionstring, echo=debug)

    gamedata_meta = MetaData()
    gamedata_meta.bind = gamedata_engine
    gamedata_session = sessionmaker(bind=gamedata_engine, autoflush=False, expire_on_commit=False)()

    # This should be moved elsewhere, maybe as an actual query. Current, without try-except, it breaks when making a new
    # game db because we haven't reached gamedata_meta.create_all()
    try:
        gamedata_version = gamedata_session.execute(
            "SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
        ).fetchone()[0]
    except Exception as e:
        print("Missing gamedata version.")
        gamedata_version = None

    if saveddata_connectionstring is not None:
        if callable(saveddata_connectionstring):
            saveddata_engine = create_engine(creator=saveddata_connectionstring, echo=debug)
        else:
            saveddata_engine = create_engine(saveddata_connectionstring, echo=debug)

        saveddata_meta = MetaData()
        saveddata_meta.bind = saveddata_engine
        saveddata_session = sessionmaker(bind=saveddata_engine, autoflush=False, expire_on_commit=False)()
    else:
        saveddata_meta = None

    # Lock controlling any changes introduced to session
    sd_lock = threading.Lock()

    # Import all the definitions for all our database stuff
    # noinspection PyPep8
    #from eos.db.gamedata import alphaClones, attribute, category, effect, group, icon, item, marketGroup, metaData, metaGroup, queries, traits, unit
    # noinspection PyPep8
    #from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, miscData, module, override, price, queries, skill, targetProfile, user

    # If using in memory saveddata, you'll want to reflect it so the data structure is good.
    if saveddata_connectionstring == "sqlite:///:memory:":
        saveddata_meta.create_all()

    # Output debug info to help us troubleshoot Travis
    print(saveddata_engine)
    print(gamedata_engine)

    helper = {
        #'config': eos.config,
        'gamedata_session' : gamedata_session,
        'saveddata_session' : saveddata_session,
    }
    return helper

# noinspection PyUnresolvedReferences,PyUnusedLocal
@pytest.fixture
def DBInMemory():
    print("Creating database in memory")

    import eos.config

    import eos
    import eos.db

    # Output debug info to help us troubleshoot Travis
    print(eos.db.saveddata_engine)
    print(eos.db.gamedata_engine)

    helper = {
        'config': eos.config,
        'db' : eos.db,
        'gamedata_session' : eos.db.gamedata_session,
        'saveddata_session' : eos.db.saveddata_session,
    }
    return helper


@pytest.fixture
def Gamedata():
    print("Building Gamedata")
    from eos.gamedata import Item

    helper = {
        'Item': Item,
    }
    return helper


@pytest.fixture
def Saveddata():
    print("Building Saveddata")
    from eos.saveddata.ship import Ship
    from eos.saveddata.fit import Fit
    from eos.saveddata.character import Character
    from eos.saveddata.module import Module
    from eos.const import FittingModuleState
    from eos.saveddata.citadel import Citadel
    from eos.saveddata.booster import Booster

    helper = {
        'Structure': Citadel,
        'Ship' : Ship,
        'Fit' : Fit,
        'Character': Character,
        'Module' : Module,
        'State' : FittingModuleState,
        'Booster' : Booster,
    }
    return helper
@@ -1,65 +0,0 @@
import pytest

# noinspection PyPackageRequirements


# noinspection PyShadowingNames
@pytest.fixture
def RifterFit(DB, Gamedata, Saveddata):
    print("Creating Rifter")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Rifter").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "My Rifter Fit")

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def KeepstarFit(DB, Gamedata, Saveddata):
    print("Creating Keepstar")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Keepstar").first()
    ship = Saveddata['Structure'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Keepstar Fit")

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def CurseFit(DB, Gamedata, Saveddata):
    print("Creating Curse - With Neuts")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Curse").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Curse - With Neuts")

    mod = Saveddata['Module'](DB['db'].getItem("Medium Energy Neutralizer II"))
    mod.state = Saveddata['State'].ONLINE

    # Add 5 neuts
    for _ in range(5):
        fit.modules.append(mod)

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def HeronFit(DB, Gamedata, Saveddata):
    print("Creating Heron - RemoteSebo")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Heron").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Heron - RemoteSebo")

    mod = Saveddata['Module'](DB['db'].getItem("Remote Sensor Booster II"))
    mod.state = Saveddata['State'].ONLINE

    # Add 5 neuts
    for _ in range(4):
        fit.modules.append(mod)

    return fit
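A hypothetical usage sketch of the fixtures in the deleted file above, for readers unfamiliar with pytest fixture injection. The fixture names (`RifterFit`, `CurseFit`) come from this diff; the asserted attributes (`name`, `modules`) are assumptions about `eos.saveddata.fit.Fit` and are not confirmed by this hunk.

```python
# Illustrative only: pytest injects the fixtures by parameter name.
def test_rifter_fit_is_built(RifterFit):
    assert RifterFit is not None
    assert RifterFit.name == "My Rifter Fit"  # name passed to Saveddata['Fit'] above


def test_curse_fit_has_neuts(CurseFit):
    # CurseFit appends the same neutralizer module five times.
    assert len(CurseFit.modules) == 5
```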
@@ -1,11 +0,0 @@
import pytest

# noinspection PyPackageRequirements


# noinspection PyShadowingNames
@pytest.fixture
def StrongBluePillBooster (DB, Gamedata, Saveddata):
    print("Creating Strong Blue Pill Booster")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Strong Blue Pill Booster").first()
    return Saveddata['Booster'](item)
@@ -1,101 +0,0 @@
import os

# https://msdn.microsoft.com/en-us/library/windows/desktop/dd317756(v=vs.85).aspx
windows_codecs = {
    'cp1252',  # Standard Windows
    'cp1251',  # Russian
    'cp037',
    'cp424',
    'cp437',
    'cp500',
    'cp720',
    'cp737',
    'cp775',
    'cp850',
    'cp852',
    'cp855',
    'cp856',
    'cp857',
    'cp858',
    'cp860',
    'cp861',
    'cp862',
    'cp863',
    'cp864',
    'cp865',
    'cp866',
    'cp869',
    'cp874',
    'cp875',
    'cp932',
    'cp949',
    'cp950',
    'cp1006',
    'cp1026',
    'cp1140',
    'cp1250',
    'cp1253',
    'cp1254',
    'cp1255',
    'cp1256',
    'cp1257',
    'cp1258',
}

linux_codecs = {
    'utf_8',  # Generic Linux/Mac
}

mac_codecs = [
    'utf_8',  # Generic Linux/Mac
    'mac_cyrillic',
    'mac_greek',
    'mac_iceland',
    'mac_latin2',
    'mac_roman',
    'mac_turkish',
]

universal_codecs = [
    'utf_16', 'utf_32', 'utf_32_be', 'utf_32_le', 'utf_16_be', 'utf_16_le', 'utf_7', 'utf_8_sig',
]

other_codecs = [
    'scii', 'big5', 'big5hkscs', 'euc_jp', 'euc_jis_2004', 'euc_jisx0213', 'euc_kr', 'gb2312', 'gbk', 'gb18030', 'hz', 'iso2022_jp', 'iso2022_jp_1',
    'iso2022_jp_2', 'iso2022_jp_2004', 'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr', 'latin_1', 'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5',
    'iso8859_6', 'iso8859_7', 'iso8859_8', 'iso8859_9', 'iso8859_10', 'iso8859_11', 'iso8859_13', 'iso8859_14', 'iso8859_15', 'iso8859_16', 'johab', 'koi8_r',
    'koi8_u', 'ptcp154', 'shift_jis', 'shift_jis_2004', 'shift_jisx0213'
]

system_names = {
    'Windows': windows_codecs,
    'Linux': linux_codecs,
    'Darwin': mac_codecs,
}


def GetPath(root, file=None, codec=None):
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))

    if file:
        path = os.path.join(path, file)

    if codec:
        path = path.decode(codec)

    return path


def GetUnicodePath(root, file=None, codec=None):
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))

    if file:
        path = os.path.join(path, file)

    if codec:
        path = str(path, codec)
    else:
        path = str(path)

    return path
config.py (229 lines)
@@ -1,120 +1,47 @@
import os
import sys
import yaml
import wx

from logbook import CRITICAL, DEBUG, ERROR, FingersCrossedHandler, INFO, Logger, NestedSetup, NullHandler, \
    StreamHandler, TimedRotatingFileHandler, WARNING
import hashlib
from eos.const import FittingSlot

from cryptography.fernet import Fernet

pyfalog = Logger(__name__)

# Load variable overrides specific to distribution type
try:
    import configforced
except ImportError:
    pyfalog.warning("Failed to import: configforced")
    configforced = None


# Turns on debug mode
debug = False
# Defines if our saveddata will be in pyfa root or not
saveInRoot = False

# Version data
version = "1.10.1"
tag = "preview"
expansionName = "t3d_changes"
expansionVersion = "1.1"
evemonMinVersion = "4081"

minItemSearchLength = 3
# Database version (int ONLY)
# Increment every time we need to flag for user database upgrade/modification
dbversion = 6

pyfaPath = None
savePath = None
staticPath = None
saveDB = None
gameDB = None
logPath = None
loggingLevel = None
logging_setup = None
cipher = None
clientHash = None

ESI_CACHE = 'esi_cache'

LOGLEVEL_MAP = {
    "critical": CRITICAL,
    "error": ERROR,
    "warning": WARNING,
    "info": INFO,
    "debug": DEBUG,
}

slotColourMap = {
    FittingSlot.LOW: wx.Colour(250, 235, 204),  # yellow = low slots
    FittingSlot.MED: wx.Colour(188, 215, 241),  # blue = mid slots
    FittingSlot.HIGH: wx.Colour(235, 204, 209),  # red = high slots
    FittingSlot.RIG: '',
    FittingSlot.SUBSYSTEM: ''
}

def getClientSecret():
    return clientHash


def isFrozen():
    if hasattr(sys, 'frozen'):
        return True
    else:
        return False


def __createDirs(path):
    if not os.path.exists(path):
        os.makedirs(path)


def getPyfaRoot():
    if hasattr(sys, '_MEIPASS'):
        return sys._MEIPASS
    base = getattr(sys.modules['__main__'], "__file__", sys.executable) if isFrozen() else __file__
    root = os.path.dirname(os.path.realpath(os.path.abspath(base)))
    root = root
    return root


def getVersion():
    return version


def getDefaultSave():
    return os.path.expanduser(os.path.join("~", ".pyfa"))


def defPaths(customSavePath=None):
    global debug
def defPaths():
    global pyfaPath
    global savePath
    global staticPath
    global saveDB
    global gameDB
    global saveInRoot
    global logPath
    global cipher
    global clientHash
    global version

    pyfalog.debug("Configuring Pyfa")

    # The main pyfa directory which contains run.py
    # Python 2.X uses ANSI by default, so we need to convert the character encoding
    pyfaPath = getattr(configforced, "pyfaPath", pyfaPath)
    if pyfaPath is None:
        pyfaPath = getPyfaRoot()

    # Version data

    with open(os.path.join(pyfaPath, "version.yml"), 'r') as file:
        data = yaml.load(file, Loader=yaml.SafeLoader)
        version = data['version']
    pyfaPath = unicode(os.path.dirname(os.path.realpath(os.path.abspath(
        sys.modules['__main__'].__file__))), sys.getfilesystemencoding())

    # Where we store the saved fits etc, default is the current users home directory
    if saveInRoot is True:
@@ -124,26 +51,26 @@ def defPaths(customSavePath=None):
    else:
        savePath = getattr(configforced, "savePath", None)
        if savePath is None:
            if customSavePath is None:  # customSavePath is not overriden
                savePath = getDefaultSave()
            else:
                savePath = customSavePath
        savePath = unicode(os.path.expanduser(os.path.join("~", ".pyfa")),
                           sys.getfilesystemencoding())

    __createDirs(savePath)
    # Redirect stderr to file if we're requested to do so
    stderrToFile = getattr(configforced, "stderrToFile", None)
    if stderrToFile is True:
        if not os.path.exists(savePath):
            os.mkdir(savePath)
        sys.stderr = open(os.path.join(savePath, "error_log.txt"), "w")

    secret_file = os.path.join(savePath, ".secret")
    if not os.path.exists(secret_file):
        with open(secret_file, "wb") as _file:
            _file.write(Fernet.generate_key())
    # Same for stdout
    stdoutToFile = getattr(configforced, "stdoutToFile", None)
    if stdoutToFile is True:
        if not os.path.exists(savePath):
            os.mkdir(savePath)
        sys.stdout = open(os.path.join(savePath, "output_log.txt"), "w")

    with open(secret_file, 'rb') as fp:
        key = fp.read()
    clientHash = hashlib.sha3_256(key).hexdigest()
    cipher = Fernet(key)

    # if isFrozen():
    #     os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(pyfaPath, "cacert.pem")
    #     os.environ["SSL_CERT_FILE"] = os.path.join(pyfaPath, "cacert.pem")
    # Static EVE Data from the staticdata repository, should be in the staticdata
    # directory in our pyfa directory
    staticPath = os.path.join(pyfaPath, "staticdata")

    # The database where we store all the fits etc
    saveDB = os.path.join(savePath, "saveddata.db")
@@ -151,103 +78,13 @@ def defPaths(customSavePath=None):
    # The database where the static EVE data from the datadump is kept.
    # This is not the standard sqlite datadump but a modified version created by eos
    # maintenance script
    gameDB = getattr(configforced, "gameDB", gameDB)
    if not gameDB:
        gameDB = os.path.join(pyfaPath, "eve.db")
    gameDB = os.path.join(staticPath, "eve.db")

    if debug:
        logFile = "pyfa_debug.log"
    else:
        logFile = "pyfa.log"

    logPath = os.path.join(savePath, logFile)

    # DON'T MODIFY ANYTHING BELOW
    ## DON'T MODIFY ANYTHING BELOW ##
    import eos.config

    # Caching modifiers, disable all gamedata caching, its unneeded.
    #Caching modifiers, disable all gamedata caching, its unneeded.
    eos.config.gamedataCache = False
    # saveddata db location modifier, shouldn't ever need to touch this
    eos.config.saveddata_connectionstring = "sqlite:///" + saveDB + "?check_same_thread=False"
    eos.config.gamedata_connectionstring = "sqlite:///" + gameDB + "?check_same_thread=False"

    # initialize the settings
    from service.settings import EOSSettings
    eos.config.settings = EOSSettings.getInstance().EOSSettings  # this is kind of confusing, but whatever


def defLogging():
    global debug
    global logPath
    global loggingLevel
    global logging_setup

    try:
        if debug:
            logging_setup = NestedSetup([
                # make sure we never bubble up to the stderr handler
                # if we run out of setup handling
                NullHandler(),
                StreamHandler(
                    sys.stdout,
                    bubble=False,
                    level=loggingLevel
                ),
                TimedRotatingFileHandler(
                    logPath,
                    level=0,
                    backup_count=3,
                    bubble=True,
                    date_format='%Y-%m-%d',
                ),
            ])
        else:
            logging_setup = NestedSetup([
                # make sure we never bubble up to the stderr handler
                # if we run out of setup handling
                NullHandler(),
                FingersCrossedHandler(
                    TimedRotatingFileHandler(
                        logPath,
                        level=0,
                        backup_count=3,
                        bubble=False,
                        date_format='%Y-%m-%d',
                    ),
                    action_level=ERROR,
                    buffer_size=1000,
                    # pull_information=True,
                    # reset=False,
                )
            ])
    except:
        print("Critical error attempting to setup logging. Falling back to console only.")
        logging_setup = NestedSetup([
            # make sure we never bubble up to the stderr handler
            # if we run out of setup handling
            NullHandler(),
            StreamHandler(
                sys.stdout,
                bubble=False
            )
        ])


class LoggerWriter:
    def __init__(self, level):
        # self.level is really like using log.debug(message)
        # at least in my case
        self.level = level

    def write(self, message):
        # if statement reduces the amount of newlines that are
        # printed to the logger
        if message.strip() != '':
            self.level(message.replace("\n", ""))

    def flush(self):
        # create a flush method so things can be flushed when
        # the system wants to. Not sure if simply 'printing'
        # sys.stderr is the correct way to do it, but it seemed
        # to work properly for me.
        self.level(sys.stderr)
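A minimal standalone sketch (not pyfa's actual code) of the secret-key pattern that `defPaths()` above implements: generate a `.secret` file once, then derive a stable client hash and a Fernet cipher from it on every start. The file location used here is an assumption for illustration; only `Fernet` and `hashlib.sha3_256` are taken from the diff.

```python
import hashlib
import os

from cryptography.fernet import Fernet

# Hypothetical location; pyfa derives savePath differently above.
secret_file = os.path.expanduser(os.path.join("~", ".pyfa", ".secret"))

if not os.path.exists(secret_file):
    os.makedirs(os.path.dirname(secret_file), exist_ok=True)
    with open(secret_file, "wb") as fp:
        fp.write(Fernet.generate_key())  # urlsafe base64-encoded 32-byte key

with open(secret_file, "rb") as fp:
    key = fp.read()

client_hash = hashlib.sha3_256(key).hexdigest()  # stable per-install identifier
cipher = Fernet(key)

token = cipher.encrypt(b"some secret value")  # symmetric, authenticated encryption
assert cipher.decrypt(token) == b"some secret value"
```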
File diff suppressed because it is too large
@@ -1,70 +0,0 @@
# -*- mode: python -*-

import os
from itertools import chain
import subprocess

label = subprocess.check_output([
    "git", "describe", "--tags"]).strip()

with open('gitversion', 'w+') as f:
    f.write(label.decode())

block_cipher = None


added_files = [
    ( 'imgs/gui/*.png', 'imgs/gui' ),
    ( 'imgs/gui/*.gif', 'imgs/gui' ),
    ( 'imgs/icons/*.png', 'imgs/icons' ),
    ( 'imgs/renders/*.png', 'imgs/renders' ),
    ( 'dist_assets/win/pyfa.ico', '.' ),
    ( 'dist_assets/cacert.pem', '.' ),
    ( 'eve.db', '.' ),
    ( 'README.md', '.' ),
    ( 'LICENSE', '.' ),
    ( 'gitversion', '.' ),
]

import_these = []

# Walk directories that do dynamic importing
paths = ('eos/effects', 'eos/db/migrations', 'service/conversions')
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
    for file_ in files:
        if file_.endswith(".py") and not file_.startswith("_"):
            mod_name = "{}.{}".format(
                root.replace("/", "."),
                file_.split(".py")[0],
            )
            import_these.append(mod_name)


a = Analysis(['pyfa.py'],
             pathex=[],
             binaries=[],
             datas=added_files,
             hiddenimports=import_these,
             hookspath=[],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          exclude_binaries=True,
          name='pyfa',
          debug=False,
          strip=False,
          upx=True,
          console=True )
coll = COLLECT(exe,
               a.binaries,
               a.zipfiles,
               a.datas,
               strip=False,
               upx=True,
               name='pyfa')
Binary file not shown.
@@ -1,90 +0,0 @@
# -*- mode: python -*-

import os
from itertools import chain
import subprocess
import requests.certs

label = subprocess.check_output([
    "git", "describe", "--tags"]).strip()

with open('.version', 'w+') as f:
    f.write(label.decode())

block_cipher = None

added_files = [
    ('../../imgs/gui/*.png', 'imgs/gui'),
    ('../../imgs/gui/*.gif', 'imgs/gui'),
    ('../../imgs/icons/*.png', 'imgs/icons'),
    ('../../imgs/renders/*.png', 'imgs/renders'),
    ('../../dist_assets/win/pyfa.ico', '.'),
    ('../../service/jargon/*.yaml', 'service/jargon'),
    (requests.certs.where(), '.'),  # is this needed anymore?
    ('../../eve.db', '.'),
    ('../../README.md', '.'),
    ('../../LICENSE', '.'),
    ('../../version.yml', '.'),
]


import_these = [
    'numpy.core._dtype_ctypes',  # https://github.com/pyinstaller/pyinstaller/issues/3982
    'sqlalchemy.ext.baked'  # windows build doesn't launch without if when using sqlalchemy 1.3.x
]

icon = os.path.join(os.getcwd(), "dist_assets", "mac", "pyfa.icns")

# Walk directories that do dynamic importing
paths = ('eos/db/migrations', 'service/conversions')
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
    for file_ in files:
        if file_.endswith(".py") and not file_.startswith("_"):
            mod_name = "{}.{}".format(
                root.replace("/", "."),
                file_.split(".py")[0],
            )
            import_these.append(mod_name)

a = Analysis([r'../../pyfa.py'],
             pathex=[],
             binaries=[],
             datas=added_files,
             hiddenimports=import_these,
             hookspath=['dist_assets/pyinstaller_hooks'],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher)

pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)

exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          name='pyfa',
          debug=False,
          strip=False,
          upx=True,
          runtime_tmpdir=None,
          console=False ,
          icon=icon,
          )

app = BUNDLE(
    exe,
    name='pyfa.app',
    icon=icon,
    bundle_identifier=None,
    info_plist={
        'NSHighResolutionCapable': 'True',
        'NSPrincipalClass': 'NSApplication',
        'CFBundleName': 'pyfa',
        'CFBundleDisplayName': 'pyfa',
        'CFBundleIdentifier': 'org.pyfaorg.pyfa',
    }
)
@@ -1,78 +0,0 @@
# This apes hook-matplotlib.backends.py, but REMOVES backends, all but
# the ones in the list below.
# Courtesy of https://github.com/bpteague/cytoflow/blob/70f9291/packaging/hook-matplotlib.backends.py

KEEP = ["WXAgg", "WX", "agg"]

from PyInstaller.compat import is_darwin
from PyInstaller.utils.hooks import (
    eval_statement, exec_statement, logger)


def get_matplotlib_backend_module_names():
    """
    List the names of all matplotlib backend modules importable under the
    current Python installation.
    Returns
    ----------
    list
        List of the fully-qualified names of all such modules.
    """
    # Statement safely importing a single backend module.
    import_statement = """
import os, sys
# Preserve stdout.
sys_stdout = sys.stdout
try:
    # Redirect output printed by this importation to "/dev/null", preventing
    # such output from being erroneously interpreted as an error.
    with open(os.devnull, 'w') as dev_null:
        sys.stdout = dev_null
        __import__('%s')
# If this is an ImportError, print this exception's message without a traceback.
# ImportError messages are human-readable and require no additional context.
except ImportError as exc:
    sys.stdout = sys_stdout
    print(exc)
# Else, print this exception preceded by a traceback. traceback.print_exc()
# prints to stderr rather than stdout and must not be called here!
except Exception:
    sys.stdout = sys_stdout
    import traceback
    print(traceback.format_exc())
"""

    # List of the human-readable names of all available backends.
    backend_names = eval_statement(
        'import matplotlib; print(matplotlib.rcsetup.all_backends)')

    # List of the fully-qualified names of all importable backend modules.
    module_names = []

    # If the current system is not OS X and the "CocoaAgg" backend is available,
    # remove this backend from consideration. Attempting to import this backend
    # on non-OS X systems halts the current subprocess without printing output
    # or raising exceptions, preventing its reliable detection.
    if not is_darwin and 'CocoaAgg' in backend_names:
        backend_names.remove('CocoaAgg')

    # For safety, attempt to import each backend in a unique subprocess.
    for backend_name in backend_names:
        if backend_name in KEEP:
            continue

        module_name = 'matplotlib.backends.backend_%s' % backend_name.lower()
        stdout = exec_statement(import_statement % module_name)

        # If no output was printed, this backend is importable.
        if not stdout:
            module_names.append(module_name)
            logger.info('  Matplotlib backend "%s": removed' % backend_name)

    return module_names

# Freeze all importable backends, as PyInstaller is unable to determine exactly
# which backends are required by the current program.
e=get_matplotlib_backend_module_names()
print(e)
excludedimports = e
@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
  <noInheritable/>
  <assemblyIdentity
    type="win32"
    name="Microsoft.VC90.CRT"
    version="9.0.21022.8"
    processorArchitecture="x86"
    publicKeyToken="1fc8b3b9a1e18e3b"/>
  <file name="MSVCR90.DLL"/>
  <file name="MSVCM90.DLL"/>
  <file name="MSVCP90.DLL"/>
</assembly>
@@ -1,37 +0,0 @@
# helper script to zip up pyinstaller distribution and create installer file

import os.path
from subprocess import call
import zipfile
from packaging.version import Version
import yaml


with open("version.yml", 'r') as file:
    data = yaml.load(file, Loader=yaml.SafeLoader)
    version = data['version']

os.environ["PYFA_DIST_DIR"] = os.path.join(os.getcwd(), 'dist')

os.environ["PYFA_VERSION"] = version
iscc = "C:\Program Files (x86)\Inno Setup 5\ISCC.exe"  # inno script location via wine

source = os.path.join(os.environ["PYFA_DIST_DIR"], "pyfa")

fileName = "pyfa-{}-win".format(os.environ["PYFA_VERSION"])

print("Compiling EXE")

v = Version(version)

print(v)

call([
    iscc,
    os.path.join(os.getcwd(), "dist_assets", "win", "pyfa-setup.iss"),
    "/dMyAppVersion=%s" % v,
    "/dMyAppDir=%s" % source,
    "/dMyOutputDir=%s" % os.path.join(os.getcwd()),
    "/dMyOutputFile=%s" % fileName])  # stdout=devnull, stderr=devnull

print("Done")
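Illustrative sketch only: the helper above imports `zipfile` and its header comment says it also zips the PyInstaller output, but that step is not visible in this hunk. Under that assumption, a minimal version of such a zipping step might look like this (directory and file names are assumptions, not pyfa's actual code).

```python
import os
import zipfile


def zip_dist(dist_dir, zip_name):
    """Recursively add every file under dist_dir to zip_name, keeping relative paths."""
    with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as zf:
        for root, _dirs, files in os.walk(dist_dir):
            for name in files:
                full = os.path.join(root, name)
                zf.write(full, os.path.relpath(full, dist_dir))


# Hypothetical usage, reusing names from the script above:
# zip_dist(os.path.join(os.environ["PYFA_DIST_DIR"], "pyfa"), fileName + ".zip")
```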
@@ -1,180 +0,0 @@
|
||||
; Script generated by the Inno Setup Script Wizard.
|
||||
; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
|
||||
|
||||
; Versioning
|
||||
; we do some #ifdef conditionals because automated compilation passes these as arguments
|
||||
|
||||
#ifndef MyAppVersion
|
||||
#define MyAppVersion "2.1.0"
|
||||
#endif
|
||||
|
||||
; Other config
|
||||
|
||||
#define MyAppName "pyfa"
|
||||
#define MyAppPublisher "pyfa"
|
||||
#define MyAppURL "https://github.com/pyfa-org/Pyfa/"
|
||||
#define MyAppExeName "pyfa.exe"
|
||||
|
||||
; What version starts with the new structure (1.x.0). This is used to determine if we run directory structure cleanup
|
||||
#define MajorVersionFlag 2
|
||||
#define MinorVersionFlag 0
|
||||
|
||||
#ifndef MyOutputFile
|
||||
#define MyOutputFile LowerCase(StringChange(MyAppName+'-'+MyAppVersion+'-win', " ", "-"))
|
||||
#endif
|
||||
#ifndef MyAppDir
|
||||
#define MyAppDir "pyfa"
|
||||
#endif
|
||||
#ifndef MyOutputDir
|
||||
#define MyOutputDir "dist"
|
||||
#endif
|
||||
|
||||
[Setup]
|
||||
|
||||
; NOTE: The value of AppId uniquely identifies this application.
|
||||
; Do not use the same AppId value in installers for other applications.
|
||||
; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
|
||||
AppId={{3DA39096-C08D-49CD-90E0-1D177F32C8AA}
|
||||
AppName={#MyAppName}
|
||||
AppVersion={#MyAppVersion}
|
||||
AppPublisher={#MyAppPublisher}
|
||||
AppPublisherURL={#MyAppURL}
|
||||
AppSupportURL={#MyAppURL}
|
||||
AppUpdatesURL={#MyAppURL}
|
||||
DefaultDirName={pf}\{#MyAppName}
|
||||
DefaultGroupName={#MyAppName}
|
||||
AllowNoIcons=yes
|
||||
LicenseFile={#MyAppDir}\LICENSE
|
||||
OutputDir={#MyOutputDir}
|
||||
OutputBaseFilename={#MyOutputFile}
|
||||
SetupIconFile={#MyAppDir}\pyfa.ico
|
||||
SolidCompression=yes
|
||||
CloseApplications=yes
|
||||
|
||||
[Languages]
|
||||
Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||
|
||||
[Tasks]
|
||||
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
|
||||
Name: "quicklaunchicon"; Description: "{cm:CreateQuickLaunchIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked; OnlyBelowVersion: 0,6.1
|
||||
|
||||
[Files]
|
||||
Source: "{#MyAppDir}\pyfa.exe"; DestDir: "{app}"; Flags: ignoreversion; AfterInstall: RemoveFromVirtualStore
|
||||
Source: "{#MyAppDir}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
; NOTE: Don't use "Flags: ignoreversion" on any shared system files
|
||||
|
||||
[Icons]
|
||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"
|
||||
Name: "{group}\{cm:UninstallProgram,{#MyAppName}}"; Filename: "{uninstallexe}"
|
||||
Name: "{commondesktop}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: desktopicon
|
||||
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: quicklaunchicon
|
||||
|
||||
[Run]
|
||||
Filename: "{app}\{#MyAppExeName}"; Description: "{cm:LaunchProgram,{#StringChange(MyAppName, '&', '&&')}}"; Flags: nowait postinstall skipifsilent
|
||||
|
||||
[InstallDelete]
|
||||
; These will delete left over generated files from 1.14 and below
|
||||
Type: filesandordirs; Name: "{app}\eos"
|
||||
Type: filesandordirs; Name: "{app}\gui"
|
||||
Type: filesandordirs; Name: "{app}\service"
|
||||
Type: filesandordirs; Name: "{app}\utils"
|
||||
Type: files; Name: "{app}\*.pyo"
|
||||
Type: files; Name: "{app}\*.pyc"
|
||||
|
||||
[Code]
|
||||
|
||||
function IsAppRunning(const FileName : string): Boolean;
|
||||
var
|
||||
FSWbemLocator: Variant;
|
||||
FWMIService : Variant;
|
||||
FWbemObjectSet: Variant;
|
||||
begin
|
||||
Result := false;
|
||||
FSWbemLocator := CreateOleObject('WBEMScripting.SWBEMLocator');
|
||||
FWMIService := FSWbemLocator.ConnectServer('', 'root\CIMV2', '', '');
|
||||
FWbemObjectSet := FWMIService.ExecQuery(Format('SELECT Name FROM Win32_Process Where Name="%s"',[FileName]));
|
||||
Result := (FWbemObjectSet.Count > 0);
|
||||
FWbemObjectSet := Unassigned;
|
||||
FWMIService := Unassigned;
|
||||
FSWbemLocator := Unassigned;
|
||||
end;
|
||||
|
||||
procedure RemoveFromVirtualStore;
|
||||
var
|
||||
VirtualStore,FileName,FilePath:String;
|
||||
DriveChars:Integer;
|
||||
begin
|
||||
VirtualStore:=AddBackslash(ExpandConstant('{localappdata}'))+'VirtualStore';
|
||||
FileName:=ExpandConstant(CurrentFileName);
|
||||
DriveChars:=Length(ExtractFileDrive(FileName));
|
||||
if DriveChars>0 then begin
|
||||
Delete(FileName,1,DriveChars);
|
||||
FileName:=VirtualStore+FileName;
|
||||
FilePath:=ExtractFilePath(FileName);
|
||||
DelTree(FilePath, True, True, True);
|
||||
end;
|
||||
end;
|
||||
|
||||
function PrepareToInstall(var NeedsRestart: Boolean): String;
|
||||
begin
|
||||
if(IsAppRunning( 'pyfa.exe' )) then
|
||||
begin
|
||||
Result := 'Please close pyfa before continuing. When closed, please go back to the previous step and continue.';
|
||||
end
|
||||
else
|
||||
begin
|
||||
Result := '';
|
||||
end
|
||||
end;
|
||||
|
||||
function GetUninstallString: string;
|
||||
var
|
||||
sUnInstPath: string;
|
||||
sUnInstallString: String;
|
||||
begin
|
||||
Result := '';
|
||||
sUnInstPath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\{{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1'); //Your App GUID/ID
|
||||
sUnInstallString := '';
|
||||
if not RegQueryStringValue(HKLM, sUnInstPath, 'UninstallString', sUnInstallString) then
|
||||
RegQueryStringValue(HKCU, sUnInstPath, 'UninstallString', sUnInstallString);
|
||||
Result := sUnInstallString;
|
||||
end;
|
||||
|
||||
function IsUpgrade: Boolean;
|
||||
begin
|
||||
Result := (GetUninstallString() <> '');
|
||||
end;
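GetUninstallString and IsUpgrade only probe the uninstall registry entry written by a previous pyfa install. A hedged Python equivalent using the same key path (winreg is standard library, Windows-only; the GUID is the one used in the script above):

import winreg

UNINSTALL_KEY = (r"Software\Microsoft\Windows\CurrentVersion\Uninstall"
                 r"\{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1")

def get_uninstall_string():
    # Check HKLM first, then HKCU, mirroring the Pascal code above
    for hive in (winreg.HKEY_LOCAL_MACHINE, winreg.HKEY_CURRENT_USER):
        try:
            with winreg.OpenKey(hive, UNINSTALL_KEY) as key:
                return winreg.QueryValueEx(key, "UninstallString")[0]
        except OSError:
            continue
    return ""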
|
||||
|
||||
function InitializeSetup: Boolean;
|
||||
var
|
||||
V: Integer;
|
||||
iResultCode: Integer;
|
||||
sUnInstallString: string;
|
||||
iOldVersionMajor: Cardinal;
|
||||
iOldVersionMinor: Cardinal;
|
||||
begin
|
||||
Result := True; // default result when no previous version is found
|
||||
if RegValueExists(HKEY_LOCAL_MACHINE,'Software\Microsoft\Windows\CurrentVersion\Uninstall\{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1', 'UninstallString') then //Your App GUID/ID
|
||||
begin
|
||||
RegQueryDWordValue(HKEY_LOCAL_MACHINE,
|
||||
'Software\Microsoft\Windows\CurrentVersion\Uninstall\{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1',
|
||||
'MajorVersion', iOldVersionMajor);
|
||||
RegQueryDWordValue(HKEY_LOCAL_MACHINE,
|
||||
'Software\Microsoft\Windows\CurrentVersion\Uninstall\{3DA39096-C08D-49CD-90E0-1D177F32C8AA}_is1',
|
||||
'MinorVersion', iOldVersionMinor);
|
||||
if (iOldVersionMajor < {#MajorVersionFlag}) or ((iOldVersionMajor = {#MajorVersionFlag}) and (iOldVersionMinor < {#MinorVersionFlag})) then // an old version with the old directory structure is installed
|
||||
begin
|
||||
V := MsgBox(ExpandConstant('An old version of pyfa was detected. Due to recent changes in the application structure, you must uninstall the previous version first. This will not affect your user data (saved fittings, characters, etc.). Do you want to uninstall now?'), mbInformation, MB_YESNO); //Custom Message if App installed
|
||||
if V = IDYES then
|
||||
begin
|
||||
sUnInstallString := GetUninstallString();
|
||||
sUnInstallString := RemoveQuotes(sUnInstallString);
|
||||
Exec(ExpandConstant(sUnInstallString), '', '', SW_SHOW, ewWaitUntilTerminated, iResultCode);
|
||||
Result := True; //if you want to proceed after uninstall
|
||||
//Exit; //if you want to quit after uninstall
|
||||
end
|
||||
else
|
||||
Result := False; // when an older version is present and was not uninstalled
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
@@ -1,25 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
  <assemblyIdentity name="pyfa" processorArchitecture="x86" type="win32" version="1.0.0.0"/>
  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
    <security>
      <requestedPrivileges>
        <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
      </requestedPrivileges>
    </security>
  </trustInfo>
  <dependency>
    <dependentAssembly>
      <assemblyIdentity type="win32" name="Microsoft.VC90.CRT" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
    </dependentAssembly>
  </dependency>
  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
    <application>
      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
    </application>
  </compatibility>
</assembly>
Binary file not shown.
|
Before Width: | Height: | Size: 76 KiB |
@@ -1,87 +0,0 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
import requests.certs
|
||||
|
||||
label = subprocess.check_output(["git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('.version', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
('../../imgs/gui/*.png', 'imgs/gui'),
|
||||
('../../imgs/gui/*.gif', 'imgs/gui'),
|
||||
('../../imgs/icons/*.png', 'imgs/icons'),
|
||||
('../../imgs/renders/*.png', 'imgs/renders'),
|
||||
('../../service/jargon/*.yaml', 'service/jargon'),
|
||||
('../../dist_assets/win/pyfa.ico', '.'),
|
||||
('../../dist_assets/win/pyfa.exe.manifest', '.'),
|
||||
('../../dist_assets/win/Microsoft.VC90.CRT.manifest', '.'),
|
||||
(requests.certs.where(), '.'), # is this needed anymore?
|
||||
('../../eve.db', '.'),
|
||||
('../../README.md', '.'),
|
||||
('../../LICENSE', '.'),
|
||||
('../../version.yml', '.'),
|
||||
]
|
||||
|
||||
import_these = [
|
||||
'numpy.core._dtype_ctypes', # https://github.com/pyinstaller/pyinstaller/issues/3982
|
||||
'sqlalchemy.ext.baked'  # windows build doesn't launch without it when using sqlalchemy 1.3.x
|
||||
]
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(['../../pyfa.py'],
|
||||
pathex=[
|
||||
# Need this, see https://github.com/pyinstaller/pyinstaller/issues/1566
|
||||
# To get this, download and install windows 10 SDK
|
||||
# If not building on Windows 10, this might be optional
|
||||
r'C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\x86'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=['dist_assets/pyinstaller_hooks'],
|
||||
runtime_hooks=[],
|
||||
excludes=['Tkinter'],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
debug=False,
|
||||
console=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
# Note: This script is provided AS-IS for those that may be interested.
|
||||
# pyfa does not fully support PyInstaller (or any other build process) at the moment
|
||||
|
||||
# Command line to build:
|
||||
# (Run from directory where pyfa.py and pyfa.spec lives.)
|
||||
# c:\Python27\scripts\pyinstaller.exe --clean --noconfirm --windowed --upx-dir=.\scripts\upx.exe pyfa.spec
|
||||
|
||||
# Don't forget to change the path to where your pyfa.py and pyfa.spec lives
|
||||
# pathex=['C:\\Users\\Ebag333\\Documents\\GitHub\\Ebag333\\Pyfa'],
|
||||
|
||||
import os
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
( 'imgs/gui/*.png', 'imgs/gui' ),
|
||||
( 'imgs/gui/*.gif', 'imgs/gui' ),
|
||||
( 'imgs/icons/*.png', 'imgs/icons' ),
|
||||
( 'imgs/renders/*.png', 'imgs/renders' ),
|
||||
( 'dist_assets/win/pyfa.ico', '.' ),
|
||||
( 'dist_assets/cacert.pem', '.' ),
|
||||
( 'eve.db', '.' ),
|
||||
( 'README.md', '.' ),
|
||||
( 'LICENSE', '.' ),
|
||||
]
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk eos.effects and add all effects so we can import them properly
|
||||
for root, folders, files in os.walk("eos/effects"):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(
|
||||
['pyfa.py'],
|
||||
pathex=['C:\\projects\\pyfa\\'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
pyz = PYZ(
|
||||
a.pure,
|
||||
a.zipped_data,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
debug=True,
|
||||
console=True,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
onefile=False,
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
onefile=False,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
@@ -1,45 +0,0 @@
|
||||
# UTF-8
|
||||
#
|
||||
# For more details about fixed file info 'ffi' see:
|
||||
# http://msdn.microsoft.com/en-us/library/ms646997.aspx
|
||||
VSVersionInfo(
|
||||
ffi=FixedFileInfo(
|
||||
# filevers and prodvers should always be a tuple with four items: (1, 2, 3, 4)
|
||||
# Set unneeded items to zero.
|
||||
filevers=(1, 15, 1, 0),
|
||||
prodvers=(1, 15, 1, 0),
|
||||
# Contains a bitmask that specifies the valid bits of 'flags'.
|
||||
mask=0x3f,
|
||||
# Contains a bitmask that specifies the Boolean attributes of the file.
|
||||
flags=0x0,
|
||||
# The operating system for which this file was designed.
|
||||
# 0x4 - NT and there is no need to change it.
|
||||
OS=0x40004,
|
||||
# The general type of file.
|
||||
# 0x1 - the file is an application.
|
||||
fileType=0x1,
|
||||
# The function of the file.
|
||||
# 0x0 - the function is not defined for this fileType
|
||||
subtype=0x0,
|
||||
# Creation date and time stamp.
|
||||
date=(0, 0)
|
||||
),
|
||||
kids=[
|
||||
StringFileInfo(
|
||||
[
|
||||
StringTable(
|
||||
u'040904E4',
|
||||
[StringStruct(u'LegalCopyright', u''),
|
||||
StringStruct(u'InternalName', u'pyfa.exe'),
|
||||
StringStruct(u'FileVersion', u'1.15.1.0'),
|
||||
StringStruct(u'CompanyName', u''),
|
||||
StringStruct(u'OriginalFilename', u'pyfa.exe'),
|
||||
StringStruct(u'ProductVersion', u'1.15.1.0'),
|
||||
StringStruct(u'FileDescription', u'Python fitting assistant'),
|
||||
StringStruct(u'LegalTrademarks', u''),
|
||||
StringStruct(u'Comments', u''),
|
||||
StringStruct(u'ProductName', u'pyfa')])
|
||||
]),
|
||||
VarFileInfo([VarStruct(u'Translation', [1033, 1252])])
|
||||
]
|
||||
)
|
||||
@@ -1,2 +1,7 @@
|
||||
version = "0.2.3"
|
||||
tag = "git"
|
||||
version = "0.2.3"
|
||||
tag = "git"
|
||||
|
||||
def test():
|
||||
import tests.runTests
|
||||
import unittest
|
||||
unittest.main(defaultTest="discover", testLoader=tests.runTests.loader)
|
||||
|
||||
237
eos/capSim.py
237
eos/capSim.py
@@ -1,19 +1,18 @@
|
||||
import heapq
|
||||
import time
|
||||
from math import sqrt, exp
|
||||
from collections import Counter
|
||||
import time
|
||||
|
||||
|
||||
DAY = 24 * 60 * 60 * 1000
|
||||
|
||||
|
||||
def lcm(a, b):
|
||||
n = a * b
|
||||
def lcm(a,b):
|
||||
n = a*b
|
||||
while b:
|
||||
a, b = b, a % b
|
||||
return n / a
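# Illustrative example: two modules cycling every 5000 ms and 3000 ms repeat
# together every lcm(5000, 3000) = 15000 ms, which is the period the simulator
# later uses to detect a repeating (stable) cap pattern.
assert lcm(5000, 3000) == 15000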
|
||||
|
||||
|
||||
class CapSimulator:
|
||||
class CapSimulator(object):
|
||||
"""Entity's EVE Capacitor Simulator"""
|
||||
|
||||
def __init__(self):
|
||||
@@ -21,7 +20,6 @@ class CapSimulator:
|
||||
|
||||
self.capacitorCapacity = 100
|
||||
self.capacitorRecharge = 1000
|
||||
self.startingCapacity = 1000
|
||||
|
||||
# max simulated time.
|
||||
self.t_max = DAY
|
||||
@@ -42,103 +40,78 @@ class CapSimulator:
|
||||
# relevant decimal digits of capacitor for LCM period optimization
|
||||
self.stability_precision = 1
|
||||
|
||||
# Stores how cap sim changed cap values outside of cap regen time
|
||||
self.saved_changes = ()
|
||||
self.saved_changes_internal = None
|
||||
|
||||
# Reports if sim was stopped due to detecting stability early
|
||||
self.optimize_repeats = True
|
||||
self.result_optimized_repeats = None
|
||||
|
||||
def scale_activation(self, duration, capNeed):
|
||||
for res in self.scale_resolutions:
|
||||
mod = duration % res
|
||||
if mod:
|
||||
if mod > res / 2.0:
|
||||
mod = res - mod
|
||||
if mod > res/2.0:
|
||||
mod = res-mod
|
||||
else:
|
||||
mod = -mod
|
||||
|
||||
if abs(mod) <= duration / 100.0:
|
||||
if abs(mod) <= duration/100.0:
|
||||
# only adjust if the adjustment is less than 1%
|
||||
duration += mod
|
||||
capNeed += float(mod) / duration * capNeed
|
||||
capNeed += float(mod)/duration * capNeed
|
||||
break
|
||||
|
||||
return duration, capNeed
|
||||
|
||||
def init(self, modules):
|
||||
"""prepare modules. a list of (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector) tuples is
|
||||
"""prepare modules. a list of (duration, capNeed, clipSize) tuples is
|
||||
expected, with clipSize 0 if the module has infinite ammo.
|
||||
"""
|
||||
self.modules = modules
|
||||
mods = {}
|
||||
for module in modules:
|
||||
if module in mods:
|
||||
mods[module] += 1
|
||||
else:
|
||||
mods[module] = 1
|
||||
|
||||
self.modules = mods
|
||||
|
||||
|
||||
def reset(self):
|
||||
"""Reset the simulator state"""
|
||||
self.state = []
|
||||
self.saved_changes_internal = {}
|
||||
self.result_optimized_repeats = False
|
||||
mods = {}
|
||||
period = 1
|
||||
disable_period = False
|
||||
|
||||
# Loop over modules, clearing clipSize if applicable, and group modules based on attributes
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector) in self.modules:
|
||||
for (duration, capNeed, clipSize), amount in self.modules.iteritems():
|
||||
if self.scale:
|
||||
duration, capNeed = self.scale_activation(duration, capNeed)
|
||||
|
||||
# set clipSize to infinite if reloads are disabled unless it's
|
||||
# a cap booster module
|
||||
if not self.reload and not isInjector:
|
||||
clipSize = 0
|
||||
reloadTime = 0
|
||||
|
||||
# Group modules based on their properties
|
||||
key = (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector)
|
||||
if key in mods:
|
||||
mods[key] += 1
|
||||
else:
|
||||
mods[key] = 1
|
||||
|
||||
# Loop over grouped modules, configure staggering and push to the simulation state
|
||||
for (duration, capNeed, clipSize, disableStagger, reloadTime, isInjector), amount in mods.items():
|
||||
# period optimization doesn't work when reloads are active.
|
||||
if clipSize:
|
||||
disable_period = True
|
||||
# Just push multiple instances if item is injector. We do not want to stagger them as we will
|
||||
# use them as needed and want them to be available right away
|
||||
if isInjector:
|
||||
for i in range(amount):
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize, reloadTime, isInjector])
|
||||
continue
|
||||
if self.stagger and not disableStagger:
|
||||
# Stagger all mods if they do not need to be reloaded
|
||||
if clipSize == 0:
|
||||
duration = int(duration / amount)
|
||||
# Stagger mods after first
|
||||
else:
|
||||
stagger_amount = (duration * clipSize + reloadTime) / (amount * clipSize)
|
||||
for i in range(1, amount):
|
||||
heapq.heappush(self.state, [i * stagger_amount, duration, capNeed, 0, clipSize, reloadTime, isInjector])
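# Illustrative numbers for the stagger math above: two identical modules with
# duration=5000 ms, clipSize=10 and reloadTime=10000 ms finish a full clip
# cycle in 5000*10 + 10000 = 60000 ms; spreading 2*10 activations evenly over
# that cycle gives stagger_amount = 60000 / 20 = 3000 ms, so the second copy
# is pushed onto the heap 3000 ms after the first.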
|
||||
# If mods are not staggered - just multiply cap use
|
||||
if self.stagger:
|
||||
duration = int(duration/amount)
|
||||
else:
|
||||
capNeed *= amount
|
||||
|
||||
period = lcm(period, duration)
|
||||
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize, reloadTime, isInjector])
|
||||
# set clipSize to infinite if reloads are disabled unless it's
|
||||
# a cap booster module.
|
||||
if not self.reload and capNeed > 0:
|
||||
clipSize = 0
|
||||
|
||||
# period optimization doesn't work when reloads are active.
|
||||
if clipSize:
|
||||
disable_period = True
|
||||
|
||||
heapq.heappush(self.state, [0, duration, capNeed, 0, clipSize])
|
||||
|
||||
|
||||
if disable_period:
|
||||
self.period = self.t_max
|
||||
else:
|
||||
self.period = period
|
||||
|
||||
|
||||
def run(self):
|
||||
"""Run the simulation"""
|
||||
|
||||
start = time.time()
|
||||
awaitingInjectors = []
|
||||
awaitingInjectorsCounterWrap = Counter()
|
||||
|
||||
self.reset()
|
||||
|
||||
push = heapq.heappush
|
||||
@@ -153,25 +126,22 @@ class CapSimulator:
|
||||
capCapacity = self.capacitorCapacity
|
||||
tau = self.capacitorRecharge / 5.0
|
||||
|
||||
cap_wrap = self.startingCapacity # cap value at last period
|
||||
cap_lowest = self.startingCapacity # lowest cap value encountered
|
||||
cap_lowest_pre = self.startingCapacity # lowest cap value before activations
|
||||
cap = self.startingCapacity # current cap value
|
||||
t_wrap = self.period # point in time of next period
|
||||
t_last = 0
|
||||
cap_wrap = capCapacity # cap value at last period
|
||||
cap_lowest = capCapacity # lowest cap value encountered
|
||||
cap_lowest_pre = capCapacity # lowest cap value before activations
|
||||
cap = capCapacity # current cap value
|
||||
t_wrap = self.period # point in time of next period
|
||||
|
||||
t_now = t_last = 0
|
||||
t_max = self.t_max
|
||||
|
||||
while 1:
|
||||
activation = pop(state)
|
||||
t_now, duration, capNeed, shot, clipSize, reloadTime, isInjector = activation
|
||||
|
||||
# Max time reached, stop simulation - we're stable
|
||||
t_now, duration, capNeed, shot, clipSize = activation
|
||||
if t_now >= t_max:
|
||||
break
|
||||
|
||||
# Regenerate cap from last time point
|
||||
if t_now > t_last:
|
||||
cap = ((1.0 + (sqrt(cap / capCapacity) - 1.0) * exp((t_last - t_now) / tau)) ** 2) * capCapacity
|
||||
cap = ((1.0+(sqrt(cap/capCapacity)-1.0)*exp((t_last-t_now)/tau))**2)*capCapacity
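# The formula above is EVE's capacitor recharge solution:
#   C(t) = Cmax * (1 + (sqrt(C0 / Cmax) - 1) * e**(-(t - t0) / tau)) ** 2, with tau = recharge time / 5.
# Illustrative numbers: Cmax = 100 GJ, tau = 200 ms, C0 = 25 GJ; 200 ms later
# (sqrt(0.25) - 1) * e**-1 = -0.184, so C = 100 * (1 - 0.184)**2 ≈ 66.6 GJ.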
|
||||
|
||||
if t_now != t_last:
|
||||
if cap < cap_lowest_pre:
|
||||
@@ -179,102 +149,35 @@ class CapSimulator:
|
||||
if t_now == t_wrap:
|
||||
# history is repeating itself, so if we have more cap now than last
|
||||
# time this happened, it is a stable setup.
|
||||
awaitingInjectorsCounterNow = Counter(awaitingInjectors)
|
||||
if self.optimize_repeats and cap >= cap_wrap and awaitingInjectorsCounterNow == awaitingInjectorsCounterWrap:
|
||||
self.result_optimized_repeats = True
|
||||
if cap >= cap_wrap:
|
||||
break
|
||||
cap_wrap = round(cap, stability_precision)
|
||||
awaitingInjectorsCounterWrap = awaitingInjectorsCounterNow
|
||||
t_wrap += period
|
||||
|
||||
t_last = t_now
|
||||
cap -= capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
|
||||
iterations += 1
|
||||
|
||||
# If injecting cap will "overshoot" max cap, postpone it
|
||||
if isInjector and cap - capNeed > capCapacity:
|
||||
awaitingInjectors.append((duration, capNeed, shot, clipSize, reloadTime, isInjector))
|
||||
if cap < cap_lowest:
|
||||
if cap < 0.0:
|
||||
break
|
||||
cap_lowest = cap
|
||||
|
||||
else:
|
||||
# If we will need more cap than we have, but we are not at 100% -
|
||||
# use awaiting cap injectors to top us up until we have enough or
|
||||
# until we're full
|
||||
if capNeed > cap and cap < capCapacity:
|
||||
while awaitingInjectors and capNeed > cap and capCapacity > cap:
|
||||
neededInjection = min(capNeed - cap, capCapacity - cap)
|
||||
# Find injectors which have just enough cap or more
|
||||
goodInjectors = [i for i in awaitingInjectors if -i[1] >= neededInjection]
|
||||
if goodInjectors:
|
||||
# Pick injector which overshoots the least
|
||||
bestInjector = min(goodInjectors, key=lambda i: -i[1])
|
||||
else:
|
||||
# Take the one which provides the most cap
|
||||
bestInjector = max(awaitingInjectors, key=lambda i: -i[1])
|
||||
# Use injector
|
||||
awaitingInjectors.remove(bestInjector)
|
||||
inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector = bestInjector
|
||||
cap -= inj_capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# Add injector to regular state tracker
|
||||
inj_t_now = t_now
|
||||
inj_t_now += inj_duration
|
||||
inj_shot += 1
|
||||
if inj_clipSize:
|
||||
if inj_shot % inj_clipSize == 0:
|
||||
inj_shot = 0
|
||||
inj_t_now += inj_reloadTime
|
||||
push(state, [inj_t_now, inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector])
|
||||
t_last = t_now
|
||||
|
||||
# Apply cap modification
|
||||
cap -= capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# queue the next activation of this module
|
||||
t_now += duration
|
||||
shot += 1
|
||||
if clipSize:
|
||||
if shot % clipSize == 0:
|
||||
shot = 0
|
||||
t_now += 10000 # include reload time
|
||||
activation[0] = t_now
|
||||
activation[3] = shot
|
||||
|
||||
if cap < cap_lowest:
|
||||
# Negative cap - we're unstable, simulation is over
|
||||
if cap < 0.0:
|
||||
break
|
||||
cap_lowest = cap
|
||||
|
||||
# Try using awaiting injectors to top up the cap after spending some
|
||||
while awaitingInjectors and cap < capCapacity:
|
||||
neededInjection = capCapacity - cap
|
||||
# Find injectors which do not overshoot max cap
|
||||
goodInjectors = [i for i in awaitingInjectors if -i[1] <= neededInjection]
|
||||
if not goodInjectors:
|
||||
break
|
||||
# Take the one which provides the most cap
|
||||
bestInjector = max(goodInjectors, key=lambda i: -i[1])
|
||||
# Use injector
|
||||
awaitingInjectors.remove(bestInjector)
|
||||
inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector = bestInjector
|
||||
cap -= inj_capNeed
|
||||
if cap > capCapacity:
|
||||
cap = capCapacity
|
||||
self.saved_changes_internal[t_now] = cap
|
||||
# Add injector to regular state tracker
|
||||
inj_t_now = t_now
|
||||
inj_t_now += inj_duration
|
||||
inj_shot += 1
|
||||
if inj_clipSize:
|
||||
if inj_shot % inj_clipSize == 0:
|
||||
inj_shot = 0
|
||||
inj_t_now += inj_reloadTime
|
||||
push(state, [inj_t_now, inj_duration, inj_capNeed, inj_shot, inj_clipSize, inj_reloadTime, inj_isInjector])
|
||||
|
||||
# queue the next activation of this module
|
||||
t_now += duration
|
||||
shot += 1
|
||||
if clipSize:
|
||||
if shot % clipSize == 0:
|
||||
shot = 0
|
||||
t_now += reloadTime # include reload time
|
||||
activation[0] = t_now
|
||||
activation[3] = shot
|
||||
|
||||
push(state, activation)
|
||||
push(state, activation)
|
||||
|
||||
# update instance with relevant results.
|
||||
@@ -283,19 +186,19 @@ class CapSimulator:
|
||||
|
||||
# calculate EVE's stability value
|
||||
try:
|
||||
avgDrain = sum(x[2] / x[1] for x in self.state)
|
||||
self.cap_stable_eve = 0.25 * (1.0 + sqrt(-(2.0 * avgDrain * tau - capCapacity) / capCapacity)) ** 2
|
||||
avgDrain = reduce(float.__add__, map(lambda x: x[2]/x[1], self.state), 0.0)
|
||||
self.cap_stable_eve = 0.25 * (1.0 + sqrt(-(2.0 * avgDrain * tau - capCapacity)/capCapacity)) ** 2
|
||||
except ValueError:
|
||||
self.cap_stable_eve = 0.0
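# The closed form above solves for the cap fraction at which the recharge rate
# equals the averaged drain (taking the upper root):
#   stable_fraction = 0.25 * (1 + sqrt(1 - 2 * avgDrain * tau / Cmax)) ** 2
# Illustrative numbers: Cmax = 100 GJ, tau = 200 ms, avgDrain = 0.2 GJ/ms gives
# 0.25 * (1 + sqrt(0.2)) ** 2 ≈ 0.52, i.e. EVE would report roughly 52% stable.
# The ValueError fallback fires when 2 * avgDrain * tau > Cmax, i.e. the fit
# drains more than the peak recharge rate can ever supply.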
|
||||
|
||||
|
||||
if cap > 0.0:
|
||||
# capacitor low/high water marks
|
||||
self.cap_stable_low = cap_lowest
|
||||
self.cap_stable_high = cap_lowest_pre
|
||||
else:
|
||||
self.cap_stable_low = self.cap_stable_high = 0.0
|
||||
self.cap_stable_low =\
|
||||
self.cap_stable_high = 0.0
|
||||
|
||||
self.saved_changes = tuple((k / 1000, max(0, self.saved_changes_internal[k])) for k in sorted(self.saved_changes_internal))
|
||||
self.saved_changes_internal = None
|
||||
|
||||
self.runtime = time.time() - start
|
||||
self.runtime = time.time()-start
|
||||
|
||||
@@ -1,33 +1,11 @@
|
||||
import sys
|
||||
from os.path import realpath, join, dirname, abspath
|
||||
|
||||
from logbook import Logger
|
||||
import os
|
||||
|
||||
istravis = os.environ.get('TRAVIS') == 'true'
|
||||
pyfalog = Logger(__name__)
|
||||
import sys
|
||||
|
||||
debug = False
|
||||
gamedataCache = True
|
||||
saveddataCache = True
|
||||
gamedata_version = ""
|
||||
gamedata_date = ""
|
||||
gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "eve.db"))
|
||||
pyfalog.debug("Gamedata connection string: {0}", gamedata_connectionstring)
|
||||
gamedata_connectionstring = 'sqlite:///' + unicode(realpath(join(dirname(abspath(__file__)), "..", "staticdata", "eve.db")), sys.getfilesystemencoding())
|
||||
saveddata_connectionstring = 'sqlite:///:memory:'
|
||||
|
||||
if istravis is True or hasattr(sys, '_called_from_test'):
|
||||
# Running in Travis. Run saveddata database in memory.
|
||||
saveddata_connectionstring = 'sqlite:///:memory:'
|
||||
else:
|
||||
saveddata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "saveddata", "saveddata.db"))
|
||||
|
||||
pyfalog.debug("Saveddata connection string: {0}", saveddata_connectionstring)
|
||||
|
||||
settings = {
|
||||
"useStaticAdaptiveArmorHardener": False,
|
||||
"strictSkillLevels": True,
|
||||
"globalDefaultSpoolupPercentage": 1.0
|
||||
}
|
||||
|
||||
# Autodetect path, only change if the autodetection bugs out.
|
||||
path = dirname(__file__)
|
||||
#Autodetect path, only change if the autodetection bugs out.
|
||||
path = dirname(unicode(__file__, sys.getfilesystemencoding()))
|
||||
|
||||
112
eos/const.py
112
eos/const.py
@@ -1,112 +0,0 @@
|
||||
# =============================================================================
|
||||
# Copyright (C) 2019 Ryan Holmes
|
||||
#
|
||||
# This file is part of pyfa.
|
||||
#
|
||||
# pyfa is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# pyfa is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
|
||||
# =============================================================================
|
||||
|
||||
from enum import IntEnum,unique
|
||||
|
||||
|
||||
@unique
|
||||
class FittingSlot(IntEnum):
|
||||
"""
|
||||
Contains slots for ship fittings
|
||||
"""
|
||||
# These are self-explanatory
|
||||
LOW = 1
|
||||
MED = 2
|
||||
HIGH = 3
|
||||
RIG = 4
|
||||
SUBSYSTEM = 5
|
||||
# not a real slot, needed for pyfa display rack separation
|
||||
MODE = 6
|
||||
# system effects. They are projected "modules" and pyfa assumes all modules
|
||||
# have a slot. In this case, make one up.
|
||||
SYSTEM = 7
|
||||
# used for citadel services
|
||||
SERVICE = 8
|
||||
# fighter 'slots'. Just easier to put them here...
|
||||
F_LIGHT = 10
|
||||
F_SUPPORT = 11
|
||||
F_HEAVY = 12
|
||||
# fighter 'slots' (for structures)
|
||||
FS_LIGHT = 13
|
||||
FS_SUPPORT = 14
|
||||
FS_HEAVY = 15
|
||||
|
||||
|
||||
@unique
|
||||
class ImplantLocation(IntEnum):
|
||||
"""
|
||||
Contains location of the implant
|
||||
"""
|
||||
FIT = 0
|
||||
CHARACTER = 1
|
||||
|
||||
|
||||
@unique
|
||||
class CalcType(IntEnum):
|
||||
"""
|
||||
Contains location of the calculation
|
||||
"""
|
||||
LOCAL = 0
|
||||
PROJECTED = 1
|
||||
COMMAND = 2
|
||||
|
||||
|
||||
@unique
|
||||
class FittingModuleState(IntEnum):
|
||||
"""
|
||||
Contains the state of a fitting module
|
||||
"""
|
||||
OFFLINE = -1
|
||||
ONLINE = 0
|
||||
ACTIVE = 1
|
||||
OVERHEATED = 2
|
||||
|
||||
|
||||
@unique
|
||||
class FittingHardpoint(IntEnum):
|
||||
"""
|
||||
Contains the types of a fitting hardpoint
|
||||
"""
|
||||
NONE = 0
|
||||
MISSILE = 1
|
||||
TURRET = 2
|
||||
|
||||
|
||||
@unique
|
||||
class SpoolType(IntEnum):
|
||||
SCALE = 0 # [0..1]
|
||||
TIME = 1 # Expressed via time in seconds since spool up started
|
||||
CYCLES = 2 # Expressed in amount of cycles since spool up started
|
||||
|
||||
|
||||
@unique
|
||||
class FitSystemSecurity(IntEnum):
|
||||
HISEC = 0
|
||||
LOWSEC = 1
|
||||
NULLSEC = 2
|
||||
WSPACE = 3
|
||||
|
||||
|
||||
@unique
|
||||
class Operator(IntEnum):
|
||||
PREASSIGN = 0
|
||||
PREINCREASE = 1
|
||||
MULTIPLY = 2
|
||||
POSTINCREASE = 3
|
||||
FORCE = 4
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,54 +15,40 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
import threading
|
||||
|
||||
from sqlalchemy import MetaData, create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm import sessionmaker, scoped_session
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import pool
|
||||
|
||||
from . import migration
|
||||
from eos import config
|
||||
from logbook import Logger
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
pyfalog.info("Initializing database")
|
||||
pyfalog.info("Gamedata connection: {0}", config.gamedata_connectionstring)
|
||||
pyfalog.info("Saveddata connection: {0}", config.saveddata_connectionstring)
|
||||
import migration
|
||||
|
||||
class ReadOnlyException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
pyfalog.debug('Initializing gamedata')
|
||||
gamedata_connectionstring = config.gamedata_connectionstring
|
||||
if callable(gamedata_connectionstring):
|
||||
gamedata_engine = create_engine("sqlite://", creator=gamedata_connectionstring, echo=config.debug)
|
||||
gamedata_engine = create_engine("sqlite://", creator=gamedata_connectionstring, echo = config.debug)
|
||||
else:
|
||||
gamedata_engine = create_engine(gamedata_connectionstring, echo=config.debug)
|
||||
gamedata_engine = create_engine(gamedata_connectionstring, echo = config.debug)
|
||||
|
||||
gamedata_meta = MetaData()
|
||||
gamedata_meta.bind = gamedata_engine
|
||||
gamedata_session = sessionmaker(bind=gamedata_engine, autoflush=False, expire_on_commit=False)()
|
||||
|
||||
pyfalog.debug('Getting gamedata version')
|
||||
# This should be moved elsewhere, maybe as an actual query. Currently, without the try-except, it breaks when making a new
|
||||
# game db because we haven't reached gamedata_meta.create_all()
|
||||
try:
|
||||
config.gamedata_version = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
|
||||
).fetchone()[0]
|
||||
config.gamedata_date = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'dump_time'"
|
||||
).fetchone()[0]
|
||||
except Exception as e:
|
||||
pyfalog.warning("Missing gamedata version.")
|
||||
pyfalog.critical(e)
|
||||
).fetchone()[0]
|
||||
except:
|
||||
config.gamedata_version = None
|
||||
config.gamedata_date = None
|
||||
|
||||
pyfalog.debug('Initializing saveddata')
|
||||
saveddata_connectionstring = config.saveddata_connectionstring
|
||||
if saveddata_connectionstring is not None:
|
||||
if callable(saveddata_connectionstring):
|
||||
@@ -73,35 +59,29 @@ if saveddata_connectionstring is not None:
|
||||
saveddata_meta = MetaData()
|
||||
saveddata_meta.bind = saveddata_engine
|
||||
saveddata_session = sessionmaker(bind=saveddata_engine, autoflush=False, expire_on_commit=False)()
|
||||
else:
|
||||
saveddata_meta = None
|
||||
|
||||
# Lock controlling any changes introduced to session
|
||||
sd_lock = threading.RLock()
|
||||
sd_lock = threading.Lock()
|
||||
|
||||
pyfalog.debug('Importing gamedata DB scheme')
|
||||
# Import all the definitions for all our database stuff
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata import alphaClones, attribute, category, effect, group, item, marketGroup, metaData, metaGroup, queries, traits, unit, dynamicAttributes
|
||||
pyfalog.debug('Importing saveddata DB scheme')
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata import booster, cargo, character, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, \
|
||||
miscData, mutator, module, override, price, queries, skill, targetProfile, user
|
||||
#Import all the definitions for all our database stuff
|
||||
from eos.db.gamedata import *
|
||||
from eos.db.saveddata import *
|
||||
|
||||
pyfalog.debug('Importing gamedata queries')
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata.queries import *
|
||||
pyfalog.debug('Importing saveddata queries')
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata.queries import *
|
||||
#Import queries
|
||||
from eos.db.gamedata.queries import getItem, searchItems, getVariations, getItemsByCategory, directAttributeRequest, \
|
||||
getMarketGroup, getGroup, getCategory, getAttributeInfo, getMetaData, getMetaGroup
|
||||
from eos.db.saveddata.queries import getUser, getCharacter, getFit, getFitsWithShip, countFitsWithShip, searchFits, \
|
||||
getCharacterList, getPrice, getDamagePatternList, getDamagePattern, \
|
||||
getFitList, getFleetList, getFleet, save, remove, commit, add, \
|
||||
getCharactersForUser, getMiscData, getSquadsIDsWithFitID, getWing, \
|
||||
getSquad, getBoosterFits, getProjectedFits, getTargetResistsList, getTargetResists,\
|
||||
clearPrices, countAllFits
|
||||
|
||||
# If using in memory saveddata, you'll want to reflect it so the data structure is good.
|
||||
#If using in memory saveddata, you'll want to reflect it so the data structure is good.
|
||||
if config.saveddata_connectionstring == "sqlite:///:memory:":
|
||||
saveddata_meta.create_all()
|
||||
pyfalog.info("Running database out of memory.")
|
||||
|
||||
|
||||
def rollback():
|
||||
with sd_lock:
|
||||
pyfalog.warning("Session rollback triggered.")
|
||||
saveddata_session.rollback()
|
||||
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
__all__ = ["attribute", "category", "effect", "group", "metaData", "dynamicAttributes",
|
||||
"item", "marketGroup", "metaGroup", "unit", "alphaClones"]
|
||||
__all__ = ["attribute", "category", "effect", "group", "metaData",
|
||||
"icon", "item", "marketGroup", "metaGroup", "unit"]
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Table, ForeignKey
|
||||
from sqlalchemy.orm import relation, mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import AlphaClone, AlphaCloneSkill
|
||||
|
||||
alphaclones_table = Table(
|
||||
"alphaClones",
|
||||
gamedata_meta,
|
||||
Column("alphaCloneID", Integer, primary_key=True),
|
||||
Column("alphaCloneName", String),
|
||||
)
|
||||
|
||||
alphacloneskskills_table = Table(
|
||||
"alphaCloneSkills",
|
||||
gamedata_meta,
|
||||
Column("alphaCloneID", Integer, ForeignKey("alphaClones.alphaCloneID"), primary_key=True),
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
Column("level", Integer),
|
||||
)
|
||||
|
||||
mapper(AlphaClone, alphaclones_table,
|
||||
properties={
|
||||
"ID" : synonym("alphaCloneID"),
|
||||
"skills": relation(
|
||||
AlphaCloneSkill,
|
||||
cascade="all,delete-orphan",
|
||||
backref="clone")
|
||||
})
|
||||
|
||||
mapper(AlphaCloneSkill, alphacloneskskills_table)
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,22 +15,20 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, Float, Unicode, ForeignKey, String, Boolean
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from eos.types import Attribute, Icon, AttributeInfo, Unit
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Attribute, AttributeInfo, Unit
|
||||
|
||||
typeattributes_table = Table("dgmtypeattribs", gamedata_meta,
|
||||
Column("value", Float),
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
Column("attributeID", ForeignKey("dgmattribs.attributeID"), primary_key=True))
|
||||
Column("value", Float),
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
Column("attributeID", ForeignKey("dgmattribs.attributeID"), primary_key=True))
|
||||
|
||||
attributes_table = Table("dgmattribs", gamedata_meta,
|
||||
Column("attributeID", Integer, primary_key=True),
|
||||
Column("attributeID", Integer, primary_key = True),
|
||||
Column("attributeName", String),
|
||||
Column("defaultValue", Float),
|
||||
Column("maxAttributeID", Integer, ForeignKey("dgmattribs.attributeID")),
|
||||
@@ -38,21 +36,18 @@ attributes_table = Table("dgmattribs", gamedata_meta,
|
||||
Column("published", Boolean),
|
||||
Column("displayName", String),
|
||||
Column("highIsGood", Boolean),
|
||||
Column("iconID", Integer),
|
||||
Column("attributeCategory", Integer),
|
||||
Column("tooltipDescription", Integer),
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")),
|
||||
Column("unitID", Integer, ForeignKey("dgmunits.unitID")))
|
||||
|
||||
mapper(Attribute, typeattributes_table,
|
||||
properties={"info": relation(AttributeInfo, lazy=False)})
|
||||
properties = {"info": relation(AttributeInfo, lazy=False)})
|
||||
|
||||
mapper(AttributeInfo, attributes_table,
|
||||
properties={
|
||||
"unit" : relation(Unit),
|
||||
"ID" : synonym("attributeID"),
|
||||
"name" : synonym("attributeName"),
|
||||
"description": deferred(attributes_table.c.description)
|
||||
})
|
||||
properties = {"icon" : relation(Icon),
|
||||
"unit": relation(Unit),
|
||||
"ID": synonym("attributeID"),
|
||||
"name": synonym("attributeName"),
|
||||
"description" : deferred(attributes_table.c.description)})
|
||||
|
||||
Attribute.ID = association_proxy("info", "attributeID")
|
||||
Attribute.name = association_proxy("info", "attributeName")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,24 +15,23 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Boolean, Column, Integer, String, Table
|
||||
from sqlalchemy.orm import deferred, mapper, synonym
|
||||
from sqlalchemy import Column, String, Integer, ForeignKey, Boolean, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Category
|
||||
from eos.types import Category, Icon
|
||||
|
||||
categories_table = Table("invcategories", gamedata_meta,
|
||||
Column("categoryID", Integer, primary_key=True),
|
||||
Column("categoryID", Integer, primary_key = True),
|
||||
Column("categoryName", String),
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("iconID", Integer))
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Category, categories_table,
|
||||
properties={
|
||||
"ID" : synonym("categoryID"),
|
||||
"name" : synonym("categoryName"),
|
||||
"description": deferred(categories_table.c.description)
|
||||
})
|
||||
properties = {"icon" : relation(Icon),
|
||||
"ID" : synonym("categoryID"),
|
||||
"name" : synonym("categoryName"),
|
||||
"description" : deferred(categories_table.c.description)})
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Float, Integer, Table, ForeignKey
|
||||
from sqlalchemy.orm import mapper, relation, synonym
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import DynamicItem, DynamicItemAttribute, DynamicItemItem, Item
|
||||
|
||||
from eos.gamedata import AttributeInfo
|
||||
|
||||
dynamic_table = Table("mutaplasmids", gamedata_meta,
|
||||
Column("typeID", ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
Column("resultingTypeID", ForeignKey("invtypes.typeID"), primary_key=True))
|
||||
|
||||
dynamicAttributes_table = Table("mutaplasmidAttributes", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("mutaplasmids.typeID"), primary_key=True),
|
||||
Column("attributeID", ForeignKey("dgmattribs.attributeID"), primary_key=True),
|
||||
Column("min", Float),
|
||||
Column("max", Float))
|
||||
|
||||
dynamicApplicable_table = Table("mutaplasmidItems", gamedata_meta,
|
||||
Column("typeID", ForeignKey("mutaplasmids.typeID"), primary_key=True),
|
||||
Column("applicableTypeID", ForeignKey("invtypes.typeID"), primary_key=True),)
|
||||
|
||||
mapper(DynamicItem, dynamic_table, properties={
|
||||
"attributes": relation(DynamicItemAttribute),
|
||||
"item": relation(Item, foreign_keys=[dynamic_table.c.typeID]),
|
||||
"resultingItem": relation(Item, foreign_keys=[dynamic_table.c.resultingTypeID]),
|
||||
"ID": synonym("typeID"),
|
||||
})
|
||||
|
||||
mapper(DynamicItemAttribute, dynamicAttributes_table,
|
||||
properties={"info": relation(AttributeInfo, lazy=False)})
|
||||
|
||||
mapper(DynamicItemItem, dynamicApplicable_table, properties={
|
||||
"mutaplasmid": relation(DynamicItem),
|
||||
})
|
||||
|
||||
DynamicItemAttribute.ID = association_proxy("info", "attributeID")
|
||||
DynamicItemAttribute.name = association_proxy("info", "attributeName")
|
||||
DynamicItemAttribute.description = association_proxy("info", "description")
|
||||
DynamicItemAttribute.published = association_proxy("info", "published")
|
||||
DynamicItemAttribute.displayName = association_proxy("info", "displayName")
|
||||
DynamicItemAttribute.highIsGood = association_proxy("info", "highIsGood")
|
||||
DynamicItemAttribute.iconID = association_proxy("info", "iconID")
|
||||
DynamicItemAttribute.icon = association_proxy("info", "icon")
|
||||
DynamicItemAttribute.unit = association_proxy("info", "unit")
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,32 +15,36 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Boolean, Table, ForeignKey
|
||||
from sqlalchemy.orm import mapper, synonym, deferred
|
||||
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import mapper, synonym, relation, deferred
|
||||
from eos.types import Effect, EffectInfo
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Effect, ItemEffect
|
||||
|
||||
typeeffects_table = Table("dgmtypeeffects", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
Column("effectID", Integer, ForeignKey("dgmeffects.effectID"), primary_key=True))
|
||||
|
||||
effects_table = Table("dgmeffects", gamedata_meta,
|
||||
Column("effectID", Integer, primary_key=True),
|
||||
Column("effectID", Integer, primary_key = True),
|
||||
Column("effectName", String),
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("isAssistance", Boolean),
|
||||
Column("isOffensive", Boolean),
|
||||
Column("resistanceID", Integer))
|
||||
Column("isOffensive", Boolean))
|
||||
|
||||
mapper(Effect, effects_table,
|
||||
properties={
|
||||
"ID" : synonym("effectID"),
|
||||
"name" : synonym("effectName"),
|
||||
"description": deferred(effects_table.c.description)
|
||||
})
|
||||
|
||||
mapper(ItemEffect, typeeffects_table)
|
||||
mapper(EffectInfo, effects_table,
|
||||
properties = {"ID" : synonym("effectID"),
|
||||
"name" : synonym("effectName"),
|
||||
"description" : deferred(effects_table.c.description)})
|
||||
|
||||
mapper(Effect, typeeffects_table,
|
||||
properties = {"ID": synonym("effectID"),
|
||||
"info": relation(EffectInfo, lazy=False)})
|
||||
|
||||
Effect.name = association_proxy("info", "name")
|
||||
Effect.description = association_proxy("info", "description")
|
||||
Effect.published = association_proxy("info", "published")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,26 +15,25 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred, backref
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Category, Group
|
||||
from eos.types import Group, Icon, Category
|
||||
|
||||
groups_table = Table("invgroups", gamedata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
Column("groupID", Integer, primary_key = True),
|
||||
Column("groupName", String),
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("categoryID", Integer, ForeignKey("invcategories.categoryID")),
|
||||
Column("iconID", Integer))
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Group, groups_table,
|
||||
properties={
|
||||
"category" : relation(Category, backref=backref("groups", cascade="all,delete")),
|
||||
"ID" : synonym("groupID"),
|
||||
"name" : synonym("groupName"),
|
||||
"description": deferred(groups_table.c.description)
|
||||
})
|
||||
properties = {"category" : relation(Category, backref = "groups"),
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("groupID"),
|
||||
"name" : synonym("groupName"),
|
||||
"description" : deferred(groups_table.c.description)})
|
||||
|
||||
33
eos/db/gamedata/icon.py
Normal file
33
eos/db/gamedata/icon.py
Normal file
@@ -0,0 +1,33 @@
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Table
|
||||
from sqlalchemy.orm import mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Icon
|
||||
|
||||
icons_table = Table("icons", gamedata_meta,
|
||||
Column("iconID", Integer, primary_key = True),
|
||||
Column("description", String),
|
||||
Column("iconFile", String))
|
||||
|
||||
mapper(Icon, icons_table,
|
||||
properties = {"ID" : synonym("iconID"),
|
||||
"description" : deferred(icons_table.c.description)})
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,20 +15,18 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Boolean, Column, Float, ForeignKey, Integer, String, Table
|
||||
from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table, Float
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import backref, deferred, mapper, relation, synonym
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.db.gamedata.dynamicAttributes import dynamicApplicable_table
|
||||
from eos.db.gamedata.effect import typeeffects_table
|
||||
from eos.gamedata import Attribute, DynamicItem, Effect, Group, Item, MetaType, Traits
|
||||
from eos.types import Icon, Attribute, Item, Effect, MetaType, Group, Traits
|
||||
|
||||
items_table = Table("invtypes", gamedata_meta,
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
Column("typeID", Integer, primary_key = True),
|
||||
Column("typeName", String, index=True),
|
||||
Column("description", String),
|
||||
Column("raceID", Integer),
|
||||
@@ -38,33 +36,26 @@ items_table = Table("invtypes", gamedata_meta,
|
||||
Column("capacity", Float),
|
||||
Column("published", Boolean),
|
||||
Column("marketGroupID", Integer, ForeignKey("invmarketgroups.marketGroupID")),
|
||||
Column("iconID", Integer),
|
||||
Column("graphicID", Integer),
|
||||
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True),
|
||||
Column("replacements", String))
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")),
|
||||
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True))
|
||||
|
||||
from .metaGroup import metatypes_table # noqa
|
||||
from .traits import traits_table # noqa
|
||||
from .metaGroup import metatypes_table
|
||||
from .traits import traits_table
|
||||
|
||||
mapper(Item, items_table,
|
||||
properties={
|
||||
"group" : relation(Group, backref=backref("items", cascade="all,delete")),
|
||||
"_Item__attributes": relation(Attribute, cascade='all, delete, delete-orphan', collection_class=attribute_mapped_collection('name')),
|
||||
"effects": relation(Effect, secondary=typeeffects_table, collection_class=attribute_mapped_collection('name')),
|
||||
"metaGroup" : relation(MetaType,
|
||||
primaryjoin=metatypes_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False),
|
||||
"ID" : synonym("typeID"),
|
||||
"name" : synonym("typeName"),
|
||||
"description" : deferred(items_table.c.description),
|
||||
"traits" : relation(Traits,
|
||||
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False),
|
||||
"mutaplasmids": relation(DynamicItem,
|
||||
primaryjoin=dynamicApplicable_table.c.applicableTypeID == items_table.c.typeID,
|
||||
secondaryjoin=dynamicApplicable_table.c.typeID == DynamicItem.typeID,
|
||||
secondary=dynamicApplicable_table,
|
||||
backref="applicableItems")
|
||||
})
|
||||
properties = {"group" : relation(Group, backref = "items"),
|
||||
"icon" : relation(Icon),
|
||||
"_Item__attributes" : relation(Attribute, collection_class = attribute_mapped_collection('name')),
|
||||
"effects" : relation(Effect, collection_class = attribute_mapped_collection('name')),
|
||||
"metaGroup" : relation(MetaType,
|
||||
primaryjoin = metatypes_table.c.typeID == items_table.c.typeID,
|
||||
uselist = False),
|
||||
"ID" : synonym("typeID"),
|
||||
"name" : synonym("typeName"),
|
||||
"description" : deferred(items_table.c.description),
|
||||
"traits" : relation(Traits,
|
||||
primaryjoin = traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist = False)
|
||||
})
|
||||
|
||||
Item.category = association_proxy("group", "category")
|
||||
|
||||
@@ -1,4 +1,4 @@
# ===============================================================================
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
@@ -15,29 +15,26 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
#===============================================================================

from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
from sqlalchemy.orm import relation, mapper, synonym, deferred

from eos.db import gamedata_meta
from eos.gamedata import Item, MarketGroup
from eos.types import Item, MarketGroup, Icon

marketgroups_table = Table("invmarketgroups", gamedata_meta,
Column("marketGroupID", Integer, primary_key=True),
Column("marketGroupID", Integer, primary_key = True),
Column("marketGroupName", String),
Column("description", String),
Column("hasTypes", Boolean),
Column("parentGroupID", Integer,
ForeignKey("invmarketgroups.marketGroupID", initially="DEFERRED", deferrable=True)),
Column("iconID", Integer))
Column("parentGroupID", Integer, ForeignKey("invmarketgroups.marketGroupID", initially="DEFERRED", deferrable=True)),
Column("iconID", Integer, ForeignKey("icons.iconID")))

mapper(MarketGroup, marketgroups_table,
properties={
"items" : relation(Item, backref="marketGroup"),
"parent" : relation(MarketGroup, backref="children",
remote_side=[marketgroups_table.c.marketGroupID]),
"ID" : synonym("marketGroupID"),
"name" : synonym("marketGroupName"),
"description": deferred(marketgroups_table.c.description)
})
properties = {"items" : relation(Item, backref = "marketGroup"),
"parent" : relation(MarketGroup, backref = "children", remote_side = [marketgroups_table.c.marketGroupID]),
"icon" : relation(Icon),
"ID" : synonym("marketGroupID"),
"name" : synonym("marketGroupName"),
"description" : deferred(marketgroups_table.c.description)})
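The parent/children pair above is SQLAlchemy's adjacency-list pattern: a ForeignKey back onto the same table plus remote_side to mark which side of the self-join is the parent. A small standalone sketch with assumed declarative classes and sample rows, not pyfa's classical mapping:

from sqlalchemy import Column, Integer, String, ForeignKey, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()

class MarketGroup(Base):
    __tablename__ = "invmarketgroups"
    marketGroupID = Column(Integer, primary_key=True)
    marketGroupName = Column(String)
    parentGroupID = Column(Integer, ForeignKey("invmarketgroups.marketGroupID"))
    # remote_side marks the "one" side of the self-join, so parent points up the tree
    parent = relationship("MarketGroup", backref="children",
                          remote_side=[marketGroupID])

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

root = MarketGroup(marketGroupID=1, marketGroupName="Ship Equipment")
child = MarketGroup(marketGroupID=2, marketGroupName="Turrets", parent=root)
session.add_all([root, child])
session.commit()

print(child.parent.marketGroupName)                  # Ship Equipment
print([g.marketGroupName for g in root.children])    # ['Turrets']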
@@ -1,30 +1,29 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Table, String
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import MetaData
|
||||
|
||||
metadata_table = Table("metadata", gamedata_meta,
|
||||
Column("field_name", String, primary_key=True),
|
||||
Column("field_value", String))
|
||||
|
||||
mapper(MetaData, metadata_table)
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Table, String
|
||||
from sqlalchemy.orm import mapper
|
||||
from eos.types import MetaData
|
||||
from eos.db import gamedata_meta
|
||||
|
||||
metadata_table = Table("metadata", gamedata_meta,
|
||||
Column("field_name", String, primary_key=True),
|
||||
Column("field_value", String))
|
||||
|
||||
mapper(MetaData, metadata_table)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,37 +15,33 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.db.gamedata.item import items_table
|
||||
from eos.gamedata import Item, MetaGroup, MetaType
|
||||
from eos.types import MetaGroup, Item, MetaType
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
|
||||
metagroups_table = Table("invmetagroups", gamedata_meta,
|
||||
Column("metaGroupID", Integer, primary_key=True),
|
||||
Column("metaGroupID", Integer, primary_key = True),
|
||||
Column("metaGroupName", String))
|
||||
|
||||
metatypes_table = Table("invmetatypes", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key = True),
|
||||
Column("parentTypeID", Integer, ForeignKey("invtypes.typeID")),
|
||||
Column("metaGroupID", Integer, ForeignKey("invmetagroups.metaGroupID")))
|
||||
|
||||
mapper(MetaGroup, metagroups_table,
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"name": synonym("metaGroupName")
|
||||
})
|
||||
properties = {"ID" : synonym("metaGroupID"),
|
||||
"name" : synonym("metaGroupName")})
|
||||
|
||||
mapper(MetaType, metatypes_table,
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"parent": relation(Item, primaryjoin=metatypes_table.c.parentTypeID == items_table.c.typeID),
|
||||
"items" : relation(Item, primaryjoin=metatypes_table.c.typeID == items_table.c.typeID),
|
||||
"info" : relation(MetaGroup, lazy=False)
|
||||
})
|
||||
properties = {"ID" : synonym("metaGroupID"),
|
||||
"parent" : relation(Item, primaryjoin = metatypes_table.c.parentTypeID == items_table.c.typeID),
|
||||
"items" : relation(Item, primaryjoin = metatypes_table.c.typeID == items_table.c.typeID),
|
||||
"info": relation(MetaGroup, lazy=False)})
|
||||
|
||||
MetaType.name = association_proxy("info", "name")
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
# ===============================================================================
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
@@ -15,24 +15,21 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
#===============================================================================

from sqlalchemy.inspection import inspect
from sqlalchemy.orm import aliased, exc, join
from sqlalchemy.sql import and_, or_, select

import eos.config
from eos.db import gamedata_session
from eos.db.gamedata.group import groups_table
from eos.db.gamedata.metaGroup import items_table, metatypes_table
from eos.db.gamedata.metaGroup import metatypes_table, items_table
from sqlalchemy.sql import and_, or_, select, func
from sqlalchemy.orm import join, exc
from eos.types import Item, Category, Group, MarketGroup, AttributeInfo, MetaData, MetaGroup
from eos.db.util import processEager, processWhere
from eos.gamedata import AlphaClone, Attribute, AttributeInfo, Category, DynamicItem, Group, Item, MarketGroup, MetaData, MetaGroup
import eos.config

cache = {}
configVal = getattr(eos.config, "gamedataCache", None)
if configVal is True:
def cachedQuery(amount, *keywords):
def deco(function):
cache = {}
def checkAndReturn(*args, **kwargs):
useCache = kwargs.pop("useCache", True)
cacheKey = []
@@ -48,7 +45,6 @@ if configVal is True:
return handler

return checkAndReturn

return deco

elif callable(configVal):
@@ -60,10 +56,8 @@ else:
return function(*args, **kwargs)

return checkAndReturn

return deco
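Only fragments of the cachedQuery decorator are visible in this hunk, so the following is a rough standalone sketch of the caching pattern it implements, inferred from the visible signature: results are memoised per decorated function, keyed by the first `amount` positional arguments plus the named keyword arguments, and useCache=False bypasses the cache. The key-building details are assumptions, not pyfa's exact code.

def cachedQuery(amount, *keywords):
    def deco(function):
        cache = {}

        def checkAndReturn(*args, **kwargs):
            useCache = kwargs.pop("useCache", True)
            # Hashable key: the first `amount` positionals plus the tracked keywords
            cacheKey = tuple(args[:amount]) + tuple(kwargs.get(k) for k in keywords)
            if useCache and cacheKey in cache:
                return cache[cacheKey]
            result = function(*args, **kwargs)
            if useCache:
                cache[cacheKey] = result
            return result

        return checkAndReturn

    return deco


@cachedQuery(1, "eager")
def lookupItem(lookfor, eager=None):
    print("querying", lookfor)            # only runs on a cache miss
    return {"ID": lookfor}

lookupItem(587)                   # miss: prints "querying 587"
lookupItem(587)                   # hit: served from the cache, no print
lookupItem(587, useCache=False)   # bypasses the cache: prints again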
def sqlizeString(line):
# Escape backslashes first, as they will be as escape symbol in queries
# Then escape percent and underscore signs
@@ -71,10 +65,7 @@ def sqlizeString(line):
line = line.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_").replace("*", "%")
return line
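Worked example of the escaping above: literal percent/underscore wildcards in user input are escaped, while * becomes the SQL % wildcard, so the result can be passed to LIKE with escape="\\".

def sqlizeString(line):
    # same replacement chain as above
    return line.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_").replace("*", "%")

print(sqlizeString("100% base_speed*"))   # -> 100\% base\_speed%
# which is then wrapped as "%...%" and passed to Item.name.like(..., escape="\\")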
itemNameMap = {}

@cachedQuery(1, "lookfor")
def getItem(lookfor, eager=None):
if isinstance(lookfor, int):
@@ -82,7 +73,7 @@ def getItem(lookfor, eager=None):
item = gamedata_session.query(Item).get(lookfor)
else:
item = gamedata_session.query(Item).options(*processEager(eager)).filter(Item.ID == lookfor).first()
elif isinstance(lookfor, str):
elif isinstance(lookfor, basestring):
if lookfor in itemNameMap:
id = itemNameMap[lookfor]
if eager is None:
@@ -97,87 +88,7 @@ def getItem(lookfor, eager=None):
raise TypeError("Need integer or string as argument")
return item
def getMutaplasmid(lookfor, eager=None):
if isinstance(lookfor, int):
item = gamedata_session.query(DynamicItem).filter(DynamicItem.ID == lookfor).first()
else:
raise TypeError("Need integer as argument")
return item

def getItemWithBaseItemAttribute(lookfor, baseItemID, eager=None):
# A lot of this is described in more detail in #1597
item = gamedata_session.query(Item).get(lookfor)
base = getItem(baseItemID)

# we have to load all attributes for this object, otherwise we'll lose access to them when we expunge.
# todo: figure out a way to eagerly load all these via the query...
for x in [*inspect(Item).relationships.keys(), 'description']:
getattr(item, x)

# Copy over the attributes from the base, but ise the items attributes when there's an overlap
# WARNING: the attribute object still has the old typeID. I don't believe we access this typeID anywhere in the code,
# but should keep this in mind for now.
item._Item__attributes = {**base.attributes, **item.attributes}

# Expunge the item form the session. This is required to have different Abyssal / Base combinations loaded in memory.
# Without expunging it, once one Abyssal Web is created, SQLAlchmey will use it for all others. We don't want this,
# we want to generate a completely new object to work with
gamedata_session.expunge(item)
return item
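The {**base.attributes, **item.attributes} merge relies on dict-unpacking order: keys from the right-hand dict win, so the mutated item's own attributes override the base item's where they overlap. A tiny illustration with made-up attribute names:

base_attrs = {"cpu": 30.0, "power": 10.0, "maxVelocityBonus": 55.0}   # illustrative values
item_attrs = {"maxVelocityBonus": 61.3}                               # e.g. a rolled abyssal stat
merged = {**base_attrs, **item_attrs}
print(merged)   # {'cpu': 30.0, 'power': 10.0, 'maxVelocityBonus': 61.3}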
@cachedQuery(1, "lookfor")
def getItems(lookfor, eager=None):
"""
Gets a list of items. Does a bit of cache hackery to get working properly -- cache
is usually based on function calls with the parameters, needed to extract data directly.
Works well enough. Not currently used, but it's here for possible future inclusion
"""

toGet = []
results = []

for id in lookfor:
if (id, None) in cache:
results.append(cache.get((id, None)))
else:
toGet.append(id)

if len(toGet) > 0:
# Get items that aren't currently cached, and store them in the cache
items = gamedata_session.query(Item).filter(Item.ID.in_(toGet)).all()
for item in items:
cache[(item.ID, None)] = item
results += items

# sort the results based on the original indexing
results.sort(key=lambda x: lookfor.index(x.ID))
return results
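The final sort in getItems restores the caller's ordering, since rows come back from the database in arbitrary order. The same trick in isolation, with dicts standing in for Item objects:

lookfor = [34, 603, 587]                                  # caller's order
results = [{"ID": 587}, {"ID": 34}, {"ID": 603}]          # arbitrary DB order
results.sort(key=lambda x: lookfor.index(x["ID"]))
print([r["ID"] for r in results])                         # [34, 603, 587]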
@cachedQuery(1, "lookfor")
|
||||
def getAlphaClone(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
item = gamedata_session.query(AlphaClone).get(lookfor)
|
||||
else:
|
||||
item = gamedata_session.query(AlphaClone).options(*processEager(eager)).filter(AlphaClone.ID == lookfor).first()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
return item
|
||||
|
||||
|
||||
def getAlphaCloneList(eager=None):
|
||||
eager = processEager(eager)
|
||||
clones = gamedata_session.query(AlphaClone).options(*eager).all()
|
||||
return clones
|
||||
|
||||
|
||||
groupNameMap = {}
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getGroup(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
@@ -185,7 +96,7 @@ def getGroup(lookfor, eager=None):
|
||||
group = gamedata_session.query(Group).get(lookfor)
|
||||
else:
|
||||
group = gamedata_session.query(Group).options(*processEager(eager)).filter(Group.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
elif isinstance(lookfor, basestring):
|
||||
if lookfor in groupNameMap:
|
||||
id = groupNameMap[lookfor]
|
||||
if eager is None:
|
||||
@@ -200,104 +111,78 @@ def getGroup(lookfor, eager=None):
|
||||
raise TypeError("Need integer or string as argument")
|
||||
return group
|
||||
|
||||
|
||||
categoryNameMap = {}
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getCategory(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
category = gamedata_session.query(Category).get(lookfor)
|
||||
else:
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(Category.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
if lookfor in categoryNameMap:
|
||||
id = categoryNameMap[lookfor]
|
||||
if eager is None:
|
||||
category = gamedata_session.query(Category).get(id)
|
||||
else:
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.ID == id).first()
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(Category.ID == id).first()
|
||||
else:
|
||||
# Category names are unique, so we can use first() instead of one()
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.name == lookfor).first()
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(Category.name == lookfor).first()
|
||||
categoryNameMap[lookfor] = category.ID
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
return category
|
||||
|
||||
|
||||
metaGroupNameMap = {}
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getMetaGroup(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
metaGroup = gamedata_session.query(MetaGroup).get(lookfor)
|
||||
else:
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(MetaGroup.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
if lookfor in metaGroupNameMap:
|
||||
id = metaGroupNameMap[lookfor]
|
||||
if eager is None:
|
||||
metaGroup = gamedata_session.query(MetaGroup).get(id)
|
||||
else:
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.ID == id).first()
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(MetaGroup.ID == id).first()
|
||||
else:
|
||||
# MetaGroup names are unique, so we can use first() instead of one()
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.name == lookfor).first()
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(MetaGroup.name == lookfor).first()
|
||||
metaGroupNameMap[lookfor] = metaGroup.ID
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
return metaGroup
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getMarketGroup(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
if eager is None:
|
||||
marketGroup = gamedata_session.query(MarketGroup).get(lookfor)
|
||||
else:
|
||||
marketGroup = gamedata_session.query(MarketGroup).options(*processEager(eager)).filter(
|
||||
MarketGroup.ID == lookfor).first()
|
||||
marketGroup = gamedata_session.query(MarketGroup).options(*processEager(eager)).filter(MarketGroup.ID == lookfor).first()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
return marketGroup
|
||||
|
||||
|
||||
def getMarketTreeNodeIds(rootNodeIds):
allIds = set()
addedIds = set(rootNodeIds)
while addedIds:
allIds.update(addedIds)
addedIds = {mg.ID for mg in gamedata_session.query(MarketGroup).filter(MarketGroup.parentGroupID.in_(addedIds))}
return allIds
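getMarketTreeNodeIds expands the market-group tree one level at a time: each pass collects the children of the current frontier until no new IDs appear. The same traversal over a plain dict so it runs without a database (the tree below is made up):

children_of = {2: [4, 9], 4: [11], 9: [], 11: []}         # hypothetical market tree

def market_tree_node_ids(root_node_ids):
    all_ids = set()
    added_ids = set(root_node_ids)
    while added_ids:
        all_ids.update(added_ids)
        # next frontier: every child of any node added in the previous pass
        added_ids = {child for parent in added_ids for child in children_of.get(parent, [])}
    return all_ids

print(sorted(market_tree_node_ids([2])))   # [2, 4, 9, 11]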
@cachedQuery(2, "where", "filter")
|
||||
def getItemsByCategory(filter, where=None, eager=None):
|
||||
if isinstance(filter, int):
|
||||
filter = Category.ID == filter
|
||||
elif isinstance(filter, str):
|
||||
elif isinstance(filter, basestring):
|
||||
filter = Category.name == filter
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
|
||||
filter = processWhere(filter, where)
|
||||
return gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category).filter(
|
||||
filter).all()
|
||||
|
||||
return gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category).filter(filter).all()
|
||||
|
||||
@cachedQuery(3, "where", "nameLike", "join")
def searchItems(nameLike, where=None, join=None, eager=None):
if not isinstance(nameLike, str):
if not isinstance(nameLike, basestring):
raise TypeError("Need string as argument")

if join is None:
@@ -308,33 +193,13 @@ def searchItems(nameLike, where=None, join=None, eager=None):

items = gamedata_session.query(Item).options(*processEager(eager)).join(*join)
for token in nameLike.split(' '):
token_safe = "%{0}%".format(sqlizeString(token))
if where is not None:
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), where))
else:
items = items.filter(Item.name.like(token_safe, escape="\\"))
token_safe = u"%{0}%".format(sqlizeString(token))
items = items.filter(processWhere(Item.name.like(token_safe, escape="\\"), where))
items = items.limit(100).all()
return items
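The token loop gives AND semantics: every whitespace-separated token must match, each one adding another LIKE filter before the 100-row limit. A plain-Python illustration of that matching behaviour (SQLite's LIKE is case-insensitive, approximated here with lower()):

names = ["Faint Epsilon Scoped Warp Scrambler", "Warp Disruptor II", "Scoped Survey Scanner"]
nameLike = "scoped warp"
matches = [n for n in names
           if all(token.lower() in n.lower() for token in nameLike.split(' '))]
print(matches)   # ['Faint Epsilon Scoped Warp Scrambler']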
@cachedQuery(3, "where", "nameLike", "join")
|
||||
def searchSkills(nameLike, where=None, eager=None):
|
||||
if not isinstance(nameLike, str):
|
||||
raise TypeError("Need string as argument")
|
||||
|
||||
items = gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category)
|
||||
for token in nameLike.split(' '):
|
||||
token_safe = "%{0}%".format(sqlizeString(token))
|
||||
if where is not None:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), Category.ID == 16, where))
|
||||
else:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), Category.ID == 16))
|
||||
items = items.limit(100).all()
|
||||
return items
|
||||
|
||||
|
||||
@cachedQuery(2, "where", "itemids")
def getVariations(itemids, groupIDs=None, where=None, eager=None):
def getVariations(itemids, where=None, eager=None):
for itemid in itemids:
if not isinstance(itemid, int):
raise TypeError("All passed item IDs must be integers")
@@ -345,24 +210,12 @@ def getVariations(itemids, groupIDs=None, where=None, eager=None):
itemfilter = or_(*(metatypes_table.c.parentTypeID == itemid for itemid in itemids))
filter = processWhere(itemfilter, where)
joinon = items_table.c.typeID == metatypes_table.c.typeID
vars = gamedata_session.query(Item).options(*processEager(eager)).join((metatypes_table, joinon)).filter(
filter).all()

if vars:
return vars
elif groupIDs:
itemfilter = or_(*(groups_table.c.groupID == groupID for groupID in groupIDs))
filter = processWhere(itemfilter, where)
joinon = items_table.c.groupID == groups_table.c.groupID
vars = gamedata_session.query(Item).options(*processEager(eager)).join((groups_table, joinon)).filter(
filter).all()

vars = gamedata_session.query(Item).options(*processEager(eager)).join((metatypes_table, joinon)).filter(filter).all()
return vars
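The itemfilter lines build one OR expression by unpacking a generator of equality clauses into or_(). Shown on its own with a throwaway table object; the rendered SQL string shown in the comment is approximate:

from sqlalchemy import Column, Integer, MetaData, Table, or_

meta = MetaData()
metatypes = Table("invmetatypes", meta, Column("parentTypeID", Integer))

itemids = [587, 603]
itemfilter = or_(*(metatypes.c.parentTypeID == itemid for itemid in itemids))
print(itemfilter)
# roughly: invmetatypes."parentTypeID" = :parentTypeID_1 OR invmetatypes."parentTypeID" = :parentTypeID_2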
@cachedQuery(1, "attr")
|
||||
def getAttributeInfo(attr, eager=None):
|
||||
if isinstance(attr, str):
|
||||
if isinstance(attr, basestring):
|
||||
filter = AttributeInfo.name == attr
|
||||
elif isinstance(attr, int):
|
||||
filter = AttributeInfo.ID == attr
|
||||
@@ -374,16 +227,14 @@ def getAttributeInfo(attr, eager=None):
|
||||
result = None
|
||||
return result
|
||||
|
||||
|
||||
@cachedQuery(1, "field")
|
||||
def getMetaData(field):
|
||||
if isinstance(field, str):
|
||||
if isinstance(field, basestring):
|
||||
data = gamedata_session.query(MetaData).get(field)
|
||||
else:
|
||||
raise TypeError("Need string as argument")
|
||||
return data
|
||||
|
||||
|
||||
@cachedQuery(2, "itemIDs", "attributeID")
|
||||
def directAttributeRequest(itemIDs, attrIDs):
|
||||
for itemID in itemIDs:
|
||||
@@ -393,50 +244,9 @@ def directAttributeRequest(itemIDs, attrIDs):
|
||||
if not isinstance(itemID, int):
|
||||
raise TypeError("All itemIDs must be integer")
|
||||
|
||||
q = select((Item.typeID, Attribute.attributeID, Attribute.value),
|
||||
and_(Attribute.attributeID.in_(attrIDs), Item.typeID.in_(itemIDs)),
|
||||
from_obj=[join(Attribute, Item)])
|
||||
q = select((eos.types.Item.typeID, eos.types.Attribute.attributeID, eos.types.Attribute.value),
|
||||
and_(eos.types.Attribute.attributeID.in_(attrIDs), eos.types.Item.typeID.in_(itemIDs)),
|
||||
from_obj=[join(eos.types.Attribute, eos.types.Item)])
|
||||
|
||||
result = gamedata_session.execute(q).fetchall()
|
||||
return result
|
||||
|
||||
|
||||
def getAbyssalTypes():
|
||||
return set([r.resultingTypeID for r in gamedata_session.query(DynamicItem.resultingTypeID).distinct()])
|
||||
|
||||
|
||||
@cachedQuery(1, "itemID")
|
||||
def getDynamicItem(itemID, eager=None):
|
||||
try:
|
||||
if isinstance(itemID, int):
|
||||
if eager is None:
|
||||
result = gamedata_session.query(DynamicItem).filter(DynamicItem.ID == itemID).one()
|
||||
else:
|
||||
result = gamedata_session.query(DynamicItem).options(*processEager(eager)).filter(DynamicItem.ID == itemID).one()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
except exc.NoResultFound:
|
||||
result = None
|
||||
return result
|
||||
|
||||
|
||||
def getRequiredFor(itemID, attrMapping):
|
||||
Attribute1 = aliased(Attribute)
|
||||
Attribute2 = aliased(Attribute)
|
||||
|
||||
skillToLevelClauses = []
|
||||
|
||||
for attrSkill, attrLevel in attrMapping.items():
|
||||
skillToLevelClauses.append(and_(Attribute1.attributeID == attrSkill, Attribute2.attributeID == attrLevel))
|
||||
|
||||
queryOr = or_(*skillToLevelClauses)
|
||||
|
||||
q = select((Attribute2.typeID, Attribute2.value),
|
||||
and_(Attribute1.value == itemID, queryOr),
|
||||
from_obj=[
|
||||
join(Attribute1, Attribute2, Attribute1.typeID == Attribute2.typeID)
|
||||
])
|
||||
|
||||
result = gamedata_session.execute(q).fetchall()
|
||||
|
||||
return result
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
|
||||
from sqlalchemy import Column, Table, Integer, String, ForeignKey
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.types import Traits
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Traits
|
||||
|
||||
traits_table = Table("invtraits", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
|
||||
Column("traitText", String))
|
||||
|
||||
mapper(Traits, traits_table)
|
||||
mapper(Traits, traits_table);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,21 +15,19 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Table, Integer, String
|
||||
from sqlalchemy.orm import mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import Unit
|
||||
from eos.types import Unit
|
||||
|
||||
groups_table = Table("dgmunits", gamedata_meta,
|
||||
Column("unitID", Integer, primary_key=True),
|
||||
Column("unitID", Integer, primary_key = True),
|
||||
Column("unitName", String),
|
||||
Column("displayName", String))
|
||||
|
||||
mapper(Unit, groups_table,
|
||||
properties={
|
||||
"ID" : synonym("unitID"),
|
||||
"name": synonym("unitName")
|
||||
})
|
||||
properties = {"ID" : synonym("unitID"),
|
||||
"name" : synonym("unitName")})
|
||||
|
||||
@@ -1,44 +1,32 @@
from logbook import Logger
import config
import shutil
import time

import config
from . import migrations

pyfalog = Logger(__name__)

def getVersion(db):
cursor = db.execute('PRAGMA user_version')
return cursor.fetchone()[0]

def getAppVersion():
return migrations.appVersion

def update(saveddata_engine):
dbVersion = getVersion(saveddata_engine)
appVersion = getAppVersion()
currversion = getVersion(saveddata_engine)

if dbVersion == appVersion:
if currversion == config.dbversion:
return

if dbVersion < appVersion:
if currversion < config.dbversion:
# Automatically backup database
toFile = "%s/saveddata_migration_%d-%d_%s.db" % (
toFile = "%s/saveddata_migration_%d-%d_%s.db"%(
config.savePath,
dbVersion,
appVersion,
currversion,
config.dbversion,
time.strftime("%Y%m%d_%H%M%S"))

shutil.copyfile(config.saveDB, toFile)

for version in range(dbVersion, appVersion):
func = migrations.updates[version + 1]
if func:
pyfalog.info("Applying database update: {0}", version + 1)
func(saveddata_engine)
for version in xrange(currversion, config.dbversion):
module = __import__('eos.db.migrations.upgrade%d'%(version+1), fromlist=True)
upgrade = getattr(module, "upgrade", False)
if upgrade:
upgrade(saveddata_engine)

# when all is said and done, set version to current
saveddata_engine.execute("PRAGMA user_version = {}".format(appVersion))
saveddata_engine.execute('PRAGMA user_version = %d'%config.dbversion)
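The update() function gates migrations on SQLite's user_version pragma: read the stored schema version, run each missing upgrade in order, then write the new version back. A standalone sketch with the sqlite3 module (pyfa drives this through an SQLAlchemy engine, and the updates below are made up for illustration):

import sqlite3

updates = {   # stand-in upgrade callables keyed by schema version
    1: lambda con: con.execute("CREATE TABLE IF NOT EXISTS fits (ID INTEGER)"),
    2: lambda con: con.execute("ALTER TABLE fits ADD COLUMN notes TEXT"),
}
appVersion = max(updates)

con = sqlite3.connect(":memory:")
dbVersion = con.execute("PRAGMA user_version").fetchone()[0]   # 0 on a fresh database

if dbVersion < appVersion:
    for version in range(dbVersion, appVersion):
        func = updates.get(version + 1)
        if func:
            print("Applying database update:", version + 1)
            func(con)
    # record that the schema is now current
    con.execute("PRAGMA user_version = {}".format(appVersion))

print(con.execute("PRAGMA user_version").fetchone()[0])   # 2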
@@ -7,44 +7,3 @@ define an upgrade() function with the logic. Please note that there must be as
many upgrade files as there are database versions (version 5 would include
upgrade files 1-5)
"""

import pkgutil
import re

updates = {}
appVersion = 0

prefix = __name__ + "."

# load modules to work based with and without pyinstaller
# from: https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py
# see: https://github.com/pyinstaller/pyinstaller/issues/1905

# load modules using iter_modules()
# (should find all filters in normal build, but not pyinstaller)
module_names = [m[1] for m in pkgutil.iter_modules(__path__, prefix)]

# special handling for PyInstaller
importers = map(pkgutil.get_importer, __path__)
toc = set()
for i in importers:
if hasattr(i, 'toc'):
toc |= i.toc

for elm in toc:
if elm.startswith(prefix):
module_names.append(elm)

for modname in module_names:
# loop through python files, extracting update number and function, and
# adding it to a list
modname_tail = modname.rsplit('.', 1)[-1]
module = __import__(modname, fromlist=True)
m = re.match("^upgrade(?P<index>\d+)$", modname_tail)
if not m:
continue
index = int(m.group("index"))
appVersion = max(appVersion, index)
upgrade = getattr(module, "upgrade", False)
if upgrade:
updates[index] = upgrade
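The loader above discovers upgradeN modules with pkgutil (plus the PyInstaller toc fallback) and derives appVersion from the highest N found. The regex/index step can be shown on its own; real module discovery needs the package on disk, so the names below are stand-ins:

import re

# stand-ins for what pkgutil.iter_modules() / the PyInstaller toc would return
module_names = ["eos.db.migrations.upgrade1",
                "eos.db.migrations.upgrade12",
                "eos.db.migrations.notes"]

appVersion = 0
for modname in module_names:
    m = re.match(r"^upgrade(?P<index>\d+)$", modname.rsplit(".", 1)[-1])
    if not m:
        continue                      # not an upgradeN module, skip it
    appVersion = max(appVersion, int(m.group("index")))

print(appVersion)   # 12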
@@ -14,50 +14,50 @@ Migration 1
|
||||
import sqlalchemy
|
||||
|
||||
CONVERSIONS = {
|
||||
6135 : [ # Scoped Cargo Scanner
|
||||
6135: [ # Scoped Cargo Scanner
|
||||
6133, # Interior Type-E Cargo Identifier
|
||||
],
|
||||
6527 : [ # Compact Ship Scanner
|
||||
6527: [ # Compact Ship Scanner
|
||||
6525, # Ta3 Perfunctory Vessel Probe
|
||||
6529, # Speculative Ship Identifier I
|
||||
6531, # Practical Type-E Ship Probe
|
||||
],
|
||||
6569 : [ # Scoped Survey Scanner
|
||||
6569: [ # Scoped Survey Scanner
|
||||
6567, # ML-3 Amphilotite Mining Probe
|
||||
6571, # Rock-Scanning Sensor Array I
|
||||
6573, # 'Dactyl' Type-E Asteroid Analyzer
|
||||
],
|
||||
509 : [ # 'Basic' Capacitor Flux Coil
|
||||
509: [ # 'Basic' Capacitor Flux Coil
|
||||
8163, # Partial Power Plant Manager: Capacitor Flux
|
||||
8165, # Alpha Reactor Control: Capacitor Flux
|
||||
8167, # Type-E Power Core Modification: Capacitor Flux
|
||||
8169, # Marked Generator Refitting: Capacitor Flux
|
||||
],
|
||||
8135 : [ # Restrained Capacitor Flux Coil
|
||||
8135: [ # Restrained Capacitor Flux Coil
|
||||
8131, # Local Power Plant Manager: Capacitor Flux I
|
||||
],
|
||||
8133 : [ # Compact Capacitor Flux Coil
|
||||
8133: [ # Compact Capacitor Flux Coil
|
||||
8137, # Mark I Generator Refitting: Capacitor Flux
|
||||
],
|
||||
3469 : [ # Basic Co-Processor
|
||||
3469: [ # Basic Co-Processor
|
||||
8744, # Nanoelectrical Co-Processor
|
||||
8743, # Nanomechanical CPU Enhancer
|
||||
8746, # Quantum Co-Processor
|
||||
8745, # Photonic CPU Enhancer
|
||||
15425, # Naiyon's Modified Co-Processor (never existed but convert
|
||||
# anyway as some fits may include it)
|
||||
# anyway as some fits may include it)
|
||||
],
|
||||
8748 : [ # Upgraded Co-Processor
|
||||
8748: [ # Upgraded Co-Processor
|
||||
8747, # Nanomechanical CPU Enhancer I
|
||||
8750, # Quantum Co-Processor I
|
||||
8749, # Photonic CPU Enhancer I
|
||||
],
|
||||
1351 : [ # Basic Reactor Control Unit
|
||||
1351: [ # Basic Reactor Control Unit
|
||||
8251, # Partial Power Plant Manager: Reaction Control
|
||||
8253, # Alpha Reactor Control: Reaction Control
|
||||
8257, # Marked Generator Refitting: Reaction Control
|
||||
],
|
||||
8263 : [ # Compact Reactor Control Unit
|
||||
8263: [ # Compact Reactor Control Unit
|
||||
8259, # Local Power Plant Manager: Reaction Control I
|
||||
8265, # Mark I Generator Refitting: Reaction Control
|
||||
8261, # Beta Reactor Control: Reaction Control I
|
||||
@@ -69,20 +69,19 @@ CONVERSIONS = {
|
||||
31936: [ # Navy Micro Auxiliary Power Core
|
||||
16543, # Micro 'Vigor' Core Augmentation
|
||||
],
|
||||
8089 : [ # Compact Light Missile Launcher
|
||||
8093, # Prototype 'Arbalest' Light Missile Launcher
|
||||
8089: [ # Compact Light Missile Launcher
|
||||
8093, # Prototype 'Arbalest' Light Missile Launcher
|
||||
],
|
||||
8091 : [ # Ample Light Missile Launcher
|
||||
8091: [ # Ample Light Missile Launcher
|
||||
7993, # Experimental TE-2100 Light Missile Launcher
|
||||
],
|
||||
# Surface Cargo Scanner I was removed from game, however no mention of
|
||||
# replacement module in patch notes. Morphing it to meta 0 module to be safe
|
||||
442 : [ # Cargo Scanner I
|
||||
442: [ # Cargo Scanner I
|
||||
6129, # Surface Cargo Scanner I
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
# Update fits schema to include target resists attribute
try:
@@ -91,9 +90,8 @@ def upgrade(saveddata_engine):
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN targetResistsID INTEGER;")

# Convert modules
for replacement_item, list in CONVERSIONS.items():
for replacement_item, list in CONVERSIONS.iteritems():
for retired_item in list:
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
(replacement_item, retired_item))
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
(replacement_item, retired_item))
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
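Each tiericide migration ends with the same conversion loop: every retired itemID in the modules and cargo tables is rewritten to its replacement via a parameterised UPDATE. Reduced to plain sqlite3 with a sample table and a two-entry mapping (the real CONVERSIONS dicts are much larger):

import sqlite3

CONVERSIONS = {8089: (8093,), 8091: (7993,)}   # replacement -> retired IDs (sample subset)

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE modules (itemID INTEGER)")
con.executemany("INSERT INTO modules VALUES (?)", [(8093,), (7993,), (1234,)])

for replacement_item, retired_items in CONVERSIONS.items():
    for retired_item in retired_items:
        con.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                    (replacement_item, retired_item))

print([row[0] for row in con.execute("SELECT itemID FROM modules ORDER BY rowid")])
# [8089, 8091, 1234] - retired IDs rewritten, unrelated rows untouched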
@@ -1,17 +0,0 @@
|
||||
"""
|
||||
Migration 10
|
||||
|
||||
- Adds active attribute to projected fits
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Update projectedFits schema to include active attribute
|
||||
try:
|
||||
saveddata_engine.execute("SELECT active FROM projectedFits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE projectedFits ADD COLUMN active BOOLEAN")
|
||||
saveddata_engine.execute("UPDATE projectedFits SET active = 1")
|
||||
saveddata_engine.execute("UPDATE projectedFits SET amount = 1")
|
||||
@@ -1,116 +0,0 @@
|
||||
"""
|
||||
Migration 11
|
||||
|
||||
- Converts modules based on December Release 2015 Tiericide
|
||||
Some modules have been unpublished (and unpublished module attributes are removed
|
||||
from database), which causes pyfa to crash. We therefore replace these
|
||||
modules with their new replacements
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16467: ( # Medium Gremlin Compact Energy Neutralizer
|
||||
16471, # Medium Unstable Power Fluctuator I
|
||||
),
|
||||
22947: ( # 'Beatnik' Small Remote Armor Repairer
|
||||
23414, # 'Brotherhood' Small Remote Armor Repairer
|
||||
),
|
||||
8295 : ( # Type-D Restrained Shield Flux Coil
|
||||
8293, # Beta Reactor Control: Shield Flux I
|
||||
),
|
||||
16499: ( # Heavy Knave Scoped Energy Nosferatu
|
||||
16501, # E500 Prototype Energy Vampire
|
||||
),
|
||||
16477: ( # Heavy Infectious Scoped Energy Neutralizer
|
||||
16473, # Heavy Rudimentary Energy Destabilizer I
|
||||
),
|
||||
16475: ( # Heavy Gremlin Compact Energy Neutralizer
|
||||
16479, # Heavy Unstable Power Fluctuator I
|
||||
),
|
||||
16447: ( # Medium Solace Scoped Remote Armor Repairer
|
||||
16445, # Medium 'Arup' Remote Armor Repairer
|
||||
),
|
||||
508 : ( # 'Basic' Shield Flux Coil
|
||||
8325, # Alpha Reactor Shield Flux
|
||||
8329, # Marked Generator Refitting: Shield Flux
|
||||
8323, # Partial Power Plant Manager: Shield Flux
|
||||
8327, # Type-E Power Core Modification: Shield Flux
|
||||
),
|
||||
1419 : ( # 'Basic' Shield Power Relay
|
||||
8341, # Alpha Reactor Shield Power Relay
|
||||
8345, # Marked Generator Refitting: Shield Power Relay
|
||||
8339, # Partial Power Plant Manager: Shield Power Relay
|
||||
8343, # Type-E Power Core Modification: Shield Power Relay
|
||||
),
|
||||
16439: ( # Small Solace Scoped Remote Armor Repairer
|
||||
16437, # Small 'Arup' Remote Armor Repairer
|
||||
),
|
||||
16505: ( # Medium Ghoul Compact Energy Nosferatu
|
||||
16511, # Medium Diminishing Power System Drain I
|
||||
),
|
||||
8297 : ( # Mark I Compact Shield Flux Coil
|
||||
8291, # Local Power Plant Manager: Reaction Shield Flux I
|
||||
),
|
||||
16455: ( # Large Solace Scoped Remote Armor Repairer
|
||||
16453, # Large 'Arup' Remote Armor Repairer
|
||||
),
|
||||
6485 : ( # M51 Benefactor Compact Shield Recharger
|
||||
6491, # Passive Barrier Compensator I
|
||||
6489, # 'Benefactor' Ward Reconstructor
|
||||
6487, # Supplemental Screen Generator I
|
||||
),
|
||||
5137 : ( # Small Knave Scoped Energy Nosferatu
|
||||
5135, # E5 Prototype Energy Vampire
|
||||
),
|
||||
8579 : ( # Medium Murky Compact Remote Shield Booster
|
||||
8581, # Medium 'Atonement' Remote Shield Booster
|
||||
),
|
||||
8531 : ( # Small Murky Compact Remote Shield Booster
|
||||
8533, # Small 'Atonement' Remote Shield Booster
|
||||
),
|
||||
16497: ( # Heavy Ghoul Compact Energy Nosferatu
|
||||
16503, # Heavy Diminishing Power System Drain I
|
||||
),
|
||||
4477 : ( # Small Gremlin Compact Energy Neutralizer
|
||||
4475, # Small Unstable Power Fluctuator I
|
||||
),
|
||||
8337 : ( # Mark I Compact Shield Power Relay
|
||||
8331, # Local Power Plant Manager: Reaction Shield Power Relay I
|
||||
),
|
||||
23416: ( # 'Peace' Large Remote Armor Repairer
|
||||
22951, # 'Pacifier' Large Remote Armor Repairer
|
||||
),
|
||||
5141 : ( # Small Ghoul Compact Energy Nosferatu
|
||||
5139, # Small Diminishing Power System Drain I
|
||||
),
|
||||
4471 : ( # Small Infectious Scoped Energy Neutralizer
|
||||
4473, # Small Rudimentary Energy Destabilizer I
|
||||
),
|
||||
16469: ( # Medium Infectious Scoped Energy Neutralizer
|
||||
16465, # Medium Rudimentary Energy Destabilizer I
|
||||
),
|
||||
8335 : ( # Type-D Restrained Shield Power Relay
|
||||
8333, # Beta Reactor Control: Shield Power Relay I
|
||||
),
|
||||
405 : ( # 'Micro' Remote Shield Booster
|
||||
8631, # Micro Asymmetric Remote Shield Booster
|
||||
8627, # Micro Murky Remote Shield Booster
|
||||
8629, # Micro 'Atonement' Remote Shield Booster
|
||||
8633, # Micro S95a Remote Shield Booster
|
||||
),
|
||||
8635 : ( # Large Murky Compact Remote Shield Booster
|
||||
8637, # Large 'Atonement' Remote Shield Booster
|
||||
),
|
||||
16507: ( # Medium Knave Scoped Energy Nosferatu
|
||||
16509, # E50 Prototype Energy Vampire
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,340 +0,0 @@
|
||||
"""
|
||||
Migration 12
|
||||
|
||||
- Converts modules based on March 2016 Module Tiericide
|
||||
Some modules have been unpublished (and unpublished module attributes are removed
|
||||
from database), which causes pyfa to crash. We therefore replace these
|
||||
modules with their new replacements
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
16457: ( # Crosslink Compact Ballistic Control System
|
||||
16459, # Muon Coil Bolt Array I
|
||||
16461, # Multiphasic Bolt Array I
|
||||
16463, # 'Pandemonium' Ballistic Enhancement
|
||||
),
|
||||
5281 : ( # Coadjunct Scoped Remote Sensor Booster
|
||||
7218, # Piercing ECCM Emitter I
|
||||
),
|
||||
5365 : ( # Cetus Scoped Burst Jammer
|
||||
5359, # 1Z-3 Subversive ECM Eruption
|
||||
),
|
||||
1973 : ( # Sensor Booster I
|
||||
1947, # ECCM - Radar I
|
||||
2002, # ECCM - Ladar I
|
||||
2003, # ECCM - Magnetometric I
|
||||
2004, # ECCM - Gravimetric I
|
||||
2005, # ECCM - Omni I
|
||||
),
|
||||
1951 : ( # 'Basic' Tracking Enhancer
|
||||
6322, # Beta-Nought Tracking Mode
|
||||
6323, # Azimuth Descalloping Tracking Enhancer
|
||||
6324, # F-AQ Delay-Line Scan Tracking Subroutines
|
||||
6321, # Beam Parallax Tracking Program
|
||||
),
|
||||
521 : ( # 'Basic' Damage Control
|
||||
5829, # GLFF Containment Field
|
||||
5831, # Interior Force Field Array
|
||||
5835, # F84 Local Damage System
|
||||
5833, # Systematic Damage Control
|
||||
),
|
||||
22925: ( # 'Bootleg' Remote Sensor Booster
|
||||
22939, # 'Boss' Remote Sensor Booster
|
||||
22941, # 'Entrepreneur' Remote Sensor Booster
|
||||
),
|
||||
5443 : ( # Faint Epsilon Scoped Warp Scrambler
|
||||
5441, # Fleeting Progressive Warp Scrambler I
|
||||
),
|
||||
1963 : ( # Remote Sensor Booster I
|
||||
1959, # ECCM Projector I
|
||||
),
|
||||
6325 : ( # Fourier Compact Tracking Enhancer
|
||||
6326, # Sigma-Nought Tracking Mode I
|
||||
6327, # Auto-Gain Control Tracking Enhancer I
|
||||
6328, # F-aQ Phase Code Tracking Subroutines
|
||||
),
|
||||
21486: ( # 'Kindred' Gyrostabilizer
|
||||
21488, # Monophonic Stabilization Actuator I
|
||||
),
|
||||
19927: ( # Hypnos Scoped Magnetometric ECM
|
||||
9518, # Initiated Ion Field ECM I
|
||||
),
|
||||
10188: ( # 'Basic' Magnetic Field Stabilizer
|
||||
11111, # Insulated Stabilizer Array
|
||||
11109, # Linear Flux Stabilizer
|
||||
11115, # Gauss Field Balancer
|
||||
11113, # Magnetic Vortex Stabilizer
|
||||
),
|
||||
22919: ( # 'Monopoly' Magnetic Field Stabilizer
|
||||
22917, # 'Capitalist' Magnetic Field Stabilizer I
|
||||
),
|
||||
5839 : ( # IFFA Compact Damage Control
|
||||
5841, # Emergency Damage Control I
|
||||
5843, # F85 Peripheral Damage System I
|
||||
5837, # Pseudoelectron Containment Field I
|
||||
),
|
||||
522 : ( # 'Micro' Cap Battery
|
||||
4747, # Micro Ld-Acid Capacitor Battery I
|
||||
4751, # Micro Ohm Capacitor Reserve I
|
||||
4745, # Micro F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4749, # Micro Peroxide Capacitor Power Cell
|
||||
3480, # Micro Capacitor Battery II
|
||||
),
|
||||
518 : ( # 'Basic' Gyrostabilizer
|
||||
5915, # Lateral Gyrostabilizer
|
||||
5919, # F-M2 Weapon Inertial Suspensor
|
||||
5913, # Hydraulic Stabilization Actuator
|
||||
5917, # Stabilized Weapon Mounts
|
||||
),
|
||||
19931: ( # Compulsive Scoped Multispectral ECM
|
||||
19933, # 'Hypnos' Multispectral ECM I
|
||||
),
|
||||
5403 : ( # Faint Scoped Warp Disruptor
|
||||
5401, # Fleeting Warp Disruptor I
|
||||
),
|
||||
23902: ( # 'Trebuchet' Heat Sink I
|
||||
23900, # 'Mangonel' Heat Sink I
|
||||
),
|
||||
1893 : ( # 'Basic' Heat Sink
|
||||
5845, # Heat Exhaust System
|
||||
5856, # C3S Convection Thermal Radiator
|
||||
5855, # 'Boreas' Coolant System
|
||||
5854, # Stamped Heat Sink
|
||||
),
|
||||
6160 : ( # F-90 Compact Sensor Booster
|
||||
20214, # Extra Radar ECCM Scanning Array I
|
||||
20220, # Extra Ladar ECCM Scanning Array I
|
||||
20226, # Extra Gravimetric ECCM Scanning Array I
|
||||
20232, # Extra Magnetometric ECCM Scanning Array I
|
||||
7948, # Gravimetric Positional ECCM Sensor System I
|
||||
7964, # Radar Positional ECCM Sensor System I
|
||||
7965, # Omni Positional ECCM Sensor System I
|
||||
7966, # Ladar Positional ECCM Sensor System I
|
||||
7970, # Magnetometric Positional ECCM Sensor System I
|
||||
20218, # Conjunctive Radar ECCM Scanning Array I
|
||||
20224, # Conjunctive Ladar ECCM Scanning Array I
|
||||
20230, # Conjunctive Gravimetric ECCM Scanning Array I
|
||||
20236, # Conjunctive Magnetometric ECCM Scanning Array I
|
||||
6157, # Supplemental Scanning CPU I
|
||||
),
|
||||
23418: ( # 'Radical' Damage Control
|
||||
22893, # 'Gonzo' Damage Control I
|
||||
),
|
||||
19952: ( # Umbra Scoped Radar ECM
|
||||
9520, # 'Penumbra' White Noise ECM
|
||||
),
|
||||
1952 : ( # Sensor Booster II
|
||||
2258, # ECCM - Omni II
|
||||
2259, # ECCM - Gravimetric II
|
||||
2260, # ECCM - Ladar II
|
||||
2261, # ECCM - Magnetometric II
|
||||
2262, # ECCM - Radar II
|
||||
),
|
||||
5282 : ( # Linked Enduring Sensor Booster
|
||||
7219, # Scattering ECCM Projector I
|
||||
),
|
||||
1986 : ( # Signal Amplifier I
|
||||
2579, # Gravimetric Backup Array I
|
||||
2583, # Ladar Backup Array I
|
||||
2587, # Magnetometric Backup Array I
|
||||
2591, # Multi Sensor Backup Array I
|
||||
4013, # RADAR Backup Array I
|
||||
),
|
||||
4871 : ( # Large Compact Pb-Acid Cap Battery
|
||||
4875, # Large Ohm Capacitor Reserve I
|
||||
4869, # Large F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4873, # Large Peroxide Capacitor Power Cell
|
||||
),
|
||||
1964 : ( # Remote Sensor Booster II
|
||||
1960, # ECCM Projector II
|
||||
),
|
||||
5933 : ( # Counterbalanced Compact Gyrostabilizer
|
||||
5931, # Cross-Lateral Gyrostabilizer I
|
||||
5935, # F-M3 Munition Inertial Suspensor
|
||||
5929, # Pneumatic Stabilization Actuator I
|
||||
),
|
||||
4025 : ( # X5 Enduring Stasis Webifier
|
||||
4029, # 'Langour' Drive Disruptor I
|
||||
),
|
||||
4027 : ( # Fleeting Compact Stasis Webifier
|
||||
4031, # Patterned Stasis Web I
|
||||
),
|
||||
22937: ( # 'Enterprise' Remote Tracking Computer
|
||||
22935, # 'Tycoon' Remote Tracking Computer
|
||||
),
|
||||
22929: ( # 'Marketeer' Tracking Computer
|
||||
22927, # 'Economist' Tracking Computer I
|
||||
),
|
||||
1987 : ( # Signal Amplifier II
|
||||
2580, # Gravimetric Backup Array II
|
||||
2584, # Ladar Backup Array II
|
||||
2588, # Magnetometric Backup Array II
|
||||
2592, # Multi Sensor Backup Array II
|
||||
4014, # RADAR Backup Array II
|
||||
),
|
||||
19939: ( # Enfeebling Scoped Ladar ECM
|
||||
9522, # Faint Phase Inversion ECM I
|
||||
),
|
||||
5340 : ( # P-S Compact Remote Tracking Computer
|
||||
5341, # 'Prayer' Remote Tracking Computer
|
||||
),
|
||||
19814: ( # Phased Scoped Target Painter
|
||||
19808, # Partial Weapon Navigation
|
||||
),
|
||||
1949 : ( # 'Basic' Signal Amplifier
|
||||
1946, # Basic RADAR Backup Array
|
||||
1982, # Basic Ladar Backup Array
|
||||
1983, # Basic Gravimetric Backup Array
|
||||
1984, # Basic Magnetometric Backup Array
|
||||
1985, # Basic Multi Sensor Backup Array
|
||||
6193, # Emergency Magnetometric Scanners
|
||||
6194, # Emergency Multi-Frequency Scanners
|
||||
6202, # Emergency RADAR Scanners
|
||||
6216, # Emergency Ladar Scanners
|
||||
6217, # Emergency Gravimetric Scanners
|
||||
6225, # Sealed RADAR Backup Cluster
|
||||
6238, # Sealed Magnetometric Backup Cluster
|
||||
6239, # Sealed Multi-Frequency Backup Cluster
|
||||
6241, # Sealed Ladar Backup Cluster
|
||||
6242, # Sealed Gravimetric Backup Cluster
|
||||
6257, # Surplus RADAR Reserve Array
|
||||
6258, # F-42 Reiterative RADAR Backup Sensors
|
||||
6283, # Surplus Magnetometric Reserve Array
|
||||
6284, # F-42 Reiterative Magnetometric Backup Sensors
|
||||
6285, # Surplus Multi-Frequency Reserve Array
|
||||
6286, # F-42 Reiterative Multi-Frequency Backup Sensors
|
||||
6289, # Surplus Ladar Reserve Array
|
||||
6290, # F-42 Reiterative Ladar Backup Sensors
|
||||
6291, # Surplus Gravimetric Reserve Array
|
||||
6292, # F-42 Reiterative Gravimetric Backup Sensors
|
||||
6309, # Amplitude Signal Enhancer
|
||||
6310, # 'Acolyth' Signal Booster
|
||||
6311, # Type-E Discriminative Signal Augmentation
|
||||
6312, # F-90 Positional Signal Amplifier
|
||||
),
|
||||
21527: ( # 'Firewall' Signal Amplifier
|
||||
21521, # Gravimetric Firewall
|
||||
21523, # Ladar Firewall
|
||||
21525, # Magnetometric Firewall
|
||||
21527, # Multi Sensor Firewall
|
||||
21529, # RADAR Firewall
|
||||
),
|
||||
23416: ( # 'Peace' Large Remote Armor Repairer
|
||||
None, # 'Pacifier' Large Remote Armor Repairer
|
||||
),
|
||||
6176 : ( # F-12 Enduring Tracking Computer
|
||||
6174, # Monopulse Tracking Mechanism I
|
||||
),
|
||||
6159 : ( # Alumel-Wired Enduring Sensor Booster
|
||||
7917, # Alumel Radar ECCM Sensor Array I
|
||||
7918, # Alumel Ladar ECCM Sensor Array I
|
||||
7922, # Alumel Gravimetric ECCM Sensor Array I
|
||||
7926, # Alumel Omni ECCM Sensor Array I
|
||||
7937, # Alumel Magnetometric ECCM Sensor Array I
|
||||
7867, # Supplemental Ladar ECCM Scanning Array I
|
||||
7869, # Supplemental Gravimetric ECCM Scanning Array I
|
||||
7870, # Supplemental Omni ECCM Scanning Array I
|
||||
7887, # Supplemental Radar ECCM Scanning Array I
|
||||
7889, # Supplemental Magnetometric ECCM Scanning Array I
|
||||
20216, # Incremental Radar ECCM Scanning Array I
|
||||
20222, # Incremental Ladar ECCM Scanning Array I
|
||||
20228, # Incremental Gravimetric ECCM Scanning Array I
|
||||
20234, # Incremental Magnetometric ECCM Scanning Array I
|
||||
7892, # Prototype ECCM Radar Sensor Cluster
|
||||
7893, # Prototype ECCM Ladar Sensor Cluster
|
||||
7895, # Prototype ECCM Gravimetric Sensor Cluster
|
||||
7896, # Prototype ECCM Omni Sensor Cluster
|
||||
7914, # Prototype ECCM Magnetometric Sensor Cluster
|
||||
6158, # Prototype Sensor Booster
|
||||
),
|
||||
5849 : ( # Extruded Compact Heat Sink
|
||||
5846, # Thermal Exhaust System I
|
||||
5858, # C4S Coiled Circuit Thermal Radiator
|
||||
5857, # 'Skadi' Coolant System I
|
||||
),
|
||||
22895: ( # 'Shady' Sensor Booster
|
||||
22897, # 'Forger' ECCM - Magnetometric I
|
||||
),
|
||||
11105: ( # Vortex Compact Magnetic Field Stabilizer
|
||||
11103, # Insulated Stabilizer Array I
|
||||
11101, # Linear Flux Stabilizer I
|
||||
11107, # Gauss Field Balancer I
|
||||
),
|
||||
22945: ( # 'Executive' Remote Sensor Dampener
|
||||
22943, # 'Broker' Remote Sensor Dampener I
|
||||
),
|
||||
6173 : ( # Optical Compact Tracking Computer
|
||||
6175, # 'Orion' Tracking CPU I
|
||||
),
|
||||
5279 : ( # F-23 Compact Remote Sensor Booster
|
||||
7217, # Spot Pulsing ECCM I
|
||||
7220, # Phased Muon ECCM Caster I
|
||||
5280, # Connected Remote Sensor Booster
|
||||
),
|
||||
4787 : ( # Small Compact Pb-Acid Cap Battery
|
||||
4791, # Small Ohm Capacitor Reserve I
|
||||
4785, # Small F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4789, # Small Peroxide Capacitor Power Cell
|
||||
),
|
||||
19946: ( # BZ-5 Scoped Gravimetric ECM
|
||||
9519, # FZ-3 Subversive Spatial Destabilizer ECM
|
||||
),
|
||||
6073 : ( # Medium Compact Pb-Acid Cap Battery
|
||||
6097, # Medium Ohm Capacitor Reserve I
|
||||
6111, # Medium F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
6083, # Medium Peroxide Capacitor Power Cell
|
||||
),
|
||||
21484: ( # 'Full Duplex' Ballistic Control System
|
||||
21482, # Ballistic 'Purge' Targeting System I
|
||||
),
|
||||
6296 : ( # F-89 Compact Signal Amplifier
|
||||
6218, # Protected Gravimetric Backup Cluster I
|
||||
6222, # Protected Ladar Backup Cluster I
|
||||
6226, # Protected Magnetometric Backup Cluster I
|
||||
6230, # Protected Multi-Frequency Backup Cluster I
|
||||
6234, # Protected RADAR Backup Cluster I
|
||||
6195, # Reserve Gravimetric Scanners
|
||||
6199, # Reserve Ladar Scanners
|
||||
6203, # Reserve Magnetometric Scanners
|
||||
6207, # Reserve Multi-Frequency Scanners
|
||||
6212, # Reserve RADAR Scanners
|
||||
20238, # Secure Gravimetric Backup Cluster I
|
||||
20244, # Secure Ladar Backup Cluster I
|
||||
20250, # Secure Magnetometric Backup Cluster I
|
||||
20260, # Secure Radar Backup Cluster I
|
||||
6244, # F-43 Repetitive Gravimetric Backup Sensors
|
||||
6252, # F-43 Repetitive Ladar Backup Sensors
|
||||
6260, # F-43 Repetitive Magnetometric Backup Sensors
|
||||
6268, # F-43 Repetitive Multi-Frequency Backup Sensors
|
||||
6276, # F-43 Repetitive RADAR Backup Sensors
|
||||
20240, # Shielded Gravimetric Backup Cluster I
|
||||
20246, # Shielded Ladar Backup Cluster I
|
||||
20252, # Shielded Magnetometric Backup Cluster I
|
||||
20262, # Shielded Radar Backup Cluster I
|
||||
6243, # Surrogate Gravimetric Reserve Array I
|
||||
6251, # Surrogate Ladar Reserve Array I
|
||||
6259, # Surrogate Magnetometric Reserve Array I
|
||||
6267, # Surrogate Multi-Frequency Reserve Array I
|
||||
6275, # Surrogate RADAR Reserve Array I
|
||||
20242, # Warded Gravimetric Backup Cluster I
|
||||
20248, # Warded Ladar Backup Cluster I
|
||||
20254, # Warded Magnetometric Backup Cluster I
|
||||
20264, # Warded Radar Backup Cluster I
|
||||
6294, # 'Mendicant' Signal Booster I
|
||||
6293, # Wavelength Signal Enhancer I
|
||||
6295, # Type-D Attenuation Signal Augmentation
|
||||
),
|
||||
5302 : ( # Phased Muon Scoped Sensor Dampener
|
||||
5300, # Indirect Scanning Dampening Unit I
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,16 +0,0 @@
|
||||
"""
|
||||
Migration 13
|
||||
|
||||
- Alters fits table to introduce implant location attribute
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Update fits schema to include implant location attribute
|
||||
try:
|
||||
saveddata_engine.execute("SELECT implantLocation FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN implantLocation INTEGER;")
|
||||
saveddata_engine.execute("UPDATE fits SET implantLocation = 0")
|
||||
@@ -1,21 +0,0 @@
|
||||
"""
|
||||
Migration 14
|
||||
|
||||
- This should take care of issue #586.
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
if saveddata_engine.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='fighters'").scalar() == 'fighters':
|
||||
# Fighters table exists
|
||||
try:
|
||||
saveddata_engine.execute("SELECT active FROM fighters LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
# if we don't have the active column, we are on an old pre-release version. Drop the tables and move on
|
||||
# (they will be recreated)
|
||||
|
||||
saveddata_engine.execute("DROP TABLE fighters")
|
||||
saveddata_engine.execute("DROP TABLE fightersAbilities")
|
||||
@@ -1,19 +0,0 @@
|
||||
"""
|
||||
Migration 15
|
||||
|
||||
- Delete projected modules on citadels
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
sql = """
|
||||
DELETE FROM modules WHERE ID IN
|
||||
(
|
||||
SELECT m.ID FROM modules AS m
|
||||
JOIN fits AS f ON m.fitID = f.ID
|
||||
WHERE f.shipID IN ("35832", "35833", "35834", "40340")
|
||||
AND m.projected = 1
|
||||
)
|
||||
"""
|
||||
|
||||
saveddata_engine.execute(sql)
|
||||
@@ -1,15 +0,0 @@
|
||||
"""
|
||||
Migration 16
|
||||
|
||||
- Alters fits table to introduce notes attribute
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Update fits schema to include notes attribute
|
||||
try:
|
||||
saveddata_engine.execute("SELECT notes FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN notes VARCHAR;")
|
||||
@@ -1,42 +0,0 @@
|
||||
"""
|
||||
Migration 17
|
||||
|
||||
- Moves all fleet boosters to the new schema
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
from eos.db import saveddata_session
|
||||
from eos.db.saveddata.fit import commandFits_table
|
||||
|
||||
sql = """
|
||||
SELECT sm.memberID as boostedFit, s.leaderID AS squadBoost, w.leaderID AS wingBoost, g.leaderID AS gangBoost
|
||||
FROM squadmembers sm
|
||||
JOIN squads s ON s.ID = sm.squadID
|
||||
JOIN wings w on w.ID = s.wingID
|
||||
JOIN gangs g on g.ID = w.gangID
|
||||
"""
|
||||
try:
|
||||
results = saveddata_session.execute(sql)
|
||||
|
||||
inserts = []
|
||||
|
||||
for row in results:
|
||||
boosted = row["boostedFit"]
|
||||
types = ("squad", "wing", "gang")
|
||||
for x in types:
|
||||
value = row["{}Boost".format(x)]
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
inserts.append({"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
try:
|
||||
saveddata_session.execute(commandFits_table.insert(),
|
||||
{"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
except Exception:
|
||||
pass
|
||||
saveddata_session.commit()
|
||||
except:
|
||||
# Shouldn't fail unless you have an updated database without the old fleet schema and have manually modified the database version
|
||||
# If it does, simply fail. Fleet data migration isn't critically important here
|
||||
pass
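Note that the loop above builds an inserts list that is never used; each row is inserted individually inside its own try/except. If the per-row error handling is not needed, the collected list could be handed to the session in a single executemany call; a sketch of that variant:

# Sketch only: trades the per-row exception handling for one bulk insert.
if inserts:
    saveddata_session.execute(commandFits_table.insert(), inserts)
saveddata_session.commit()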
|
||||
@@ -1,68 +0,0 @@
|
||||
"""
|
||||
Migration 18
|
||||
|
||||
- Converts modules from old Warfare Links to Command Modules
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
42526: ( # Armor Command Burst I
|
||||
20069, # Armored Warfare Link - Damage Control I
|
||||
20409, # Armored Warfare Link - Passive Defense I
|
||||
22227, # Armored Warfare Link - Rapid Repair I
|
||||
),
|
||||
43552: ( # Armor Command Burst II
|
||||
4264, # Armored Warfare Link - Damage Control II
|
||||
4266, # Armored Warfare Link - Passive Defense II
|
||||
4266, # Armored Warfare Link - Rapid Repair II
|
||||
),
|
||||
42527: ( # Information Command Burst I
|
||||
11052, # Information Warfare Link - Sensor Integrity I
|
||||
20405, # Information Warfare Link - Recon Operation I
|
||||
20406, # Information Warfare Link - Electronic Superiority I
|
||||
),
|
||||
43554: ( # Information Command Burst II
|
||||
4268, # Information Warfare Link - Electronic Superiority II
|
||||
4270, # Information Warfare Link - Recon Operation II
|
||||
4272, # Information Warfare Link - Sensor Integrity II
|
||||
),
|
||||
42529: ( # Shield Command Burst I
|
||||
20124, # Siege Warfare Link - Active Shielding I
|
||||
20514, # Siege Warfare Link - Shield Harmonizing I
|
||||
22228, # Siege Warfare Link - Shield Efficiency I
|
||||
),
|
||||
43555: ( # Shield Command Burst II
|
||||
4280, # Siege Warfare Link - Active Shielding II
|
||||
4282, # Siege Warfare Link - Shield Efficiency II
|
||||
4284 # Siege Warfare Link - Shield Harmonizing II
|
||||
),
|
||||
42530: ( # Skirmish Command Burst I
|
||||
11017, # Skirmish Warfare Link - Interdiction Maneuvers I
|
||||
20070, # Skirmish Warfare Link - Evasive Maneuvers I
|
||||
20408, # Skirmish Warfare Link - Rapid Deployment I
|
||||
),
|
||||
43556: ( # Skirmish Command Burst II
|
||||
4286, # Skirmish Warfare Link - Evasive Maneuvers II
|
||||
4288, # Skirmish Warfare Link - Interdiction Maneuvers II
|
||||
4290 # Skirmish Warfare Link - Rapid Deployment II
|
||||
),
|
||||
42528: ( # Mining Foreman Burst I
|
||||
22553, # Mining Foreman Link - Harvester Capacitor Efficiency I
|
||||
22555, # Mining Foreman Link - Mining Laser Field Enhancement I
|
||||
22557, # Mining Foreman Link - Laser Optimization I
|
||||
),
|
||||
43551: ( # Mining Foreman Burst II
|
||||
4274, # Mining Foreman Link - Harvester Capacitor Efficiency II
|
||||
4276, # Mining Foreman Link - Laser Optimization II
|
||||
4278 # Mining Foreman Link - Mining Laser Field Enhancement II
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
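The nested loop issues two UPDATE statements per retired item. Assuming the legacy engine.execute() executemany behaviour (a list of parameter tuples passed with a plain SQL string), the whole conversion could also be flattened into two statements; a sketch:

# Build (new, old) pairs for every retired item, then let the driver run
# the statement once per pair.
params = [(new_id, old_id) for new_id, old_ids in CONVERSIONS.items() for old_id in old_ids]
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?', params)
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?', params)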
|
||||
@@ -1,18 +0,0 @@
|
||||
"""
|
||||
Migration 19
|
||||
|
||||
- Deletes broken references to fits from the commandFits table (see GH issue #844)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
from eos.db import saveddata_session
|
||||
|
||||
sql = """
|
||||
DELETE FROM commandFits
|
||||
WHERE boosterID NOT IN (select ID from fits)
|
||||
OR boostedID NOT IN (select ID from fits)
|
||||
"""
|
||||
|
||||
saveddata_session.execute(sql)
|
||||
saveddata_session.commit()
|
||||
@@ -6,7 +6,6 @@ Migration 2
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Update characters schema to include default chars
|
||||
try:
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
"""
|
||||
Migration 20
|
||||
|
||||
- Adds support for alpha clones to the characters table
|
||||
"""
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Update characters schema to include alphaCloneID
|
||||
try:
|
||||
saveddata_engine.execute("SELECT alphaCloneID FROM characters LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE characters ADD COLUMN alphaCloneID INTEGER;")
|
||||
@@ -1,10 +0,0 @@
|
||||
"""
|
||||
Migration 21
|
||||
|
||||
- Fixes discrepancy in drone table where we may have an amount active that is not equal to the amount in the stack
|
||||
(we don't support activating only 2/5 drones). See GH issue #728
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute("UPDATE drones SET amountActive = amount where amountActive > 0 AND amountActive <> amount;")
|
||||
@@ -1,45 +0,0 @@
|
||||
"""
|
||||
Migration 22
|
||||
|
||||
- Adds the created and modified fields to most tables
|
||||
"""
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
|
||||
# 1 = created only
|
||||
# 2 = created and modified
|
||||
tables = {
|
||||
"boosters": 2,
|
||||
"cargo": 2,
|
||||
"characters": 2,
|
||||
# "crest": 1,
|
||||
"damagePatterns": 2,
|
||||
"drones": 2,
|
||||
"fighters": 2,
|
||||
"fits": 2,
|
||||
"projectedFits": 2,
|
||||
"commandFits": 2,
|
||||
"implants": 2,
|
||||
"implantSets": 2,
|
||||
"modules": 2,
|
||||
"overrides": 2,
|
||||
"characterSkills": 2,
|
||||
"targetResists": 2
|
||||
}
|
||||
|
||||
for table in list(tables.keys()):
|
||||
|
||||
# midnight brain, there's probably a much more simple way to do this, but fuck it
|
||||
if tables[table] > 0:
|
||||
try:
|
||||
saveddata_engine.execute("SELECT created FROM {0} LIMIT 1;".format(table))
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE {} ADD COLUMN created DATETIME;".format(table))
|
||||
|
||||
if tables[table] > 1:
|
||||
try:
|
||||
saveddata_engine.execute("SELECT modified FROM {0} LIMIT 1;".format(table))
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE {} ADD COLUMN modified DATETIME;".format(table))
|
||||
@@ -1,13 +0,0 @@
|
||||
"""
|
||||
Migration 23
|
||||
|
||||
- Adds a sec status field to the character table
|
||||
"""
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT secStatus FROM characters LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE characters ADD COLUMN secStatus FLOAT;")
|
||||
@@ -1,14 +0,0 @@
|
||||
"""
|
||||
Migration 24
|
||||
|
||||
- Adds a boolean value to fit to signify if fit should ignore restrictions
|
||||
"""
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT ignoreRestrictions FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN ignoreRestrictions BOOLEAN")
|
||||
saveddata_engine.execute("UPDATE fits SET ignoreRestrictions = 0")
|
||||
File diff suppressed because it is too large
@@ -1,9 +0,0 @@
|
||||
"""
|
||||
Migration 26
|
||||
|
||||
- Deletes invalid command fit relationships caused by a bug (see #1244)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute("DELETE FROM commandFits WHERE boosterID NOT IN (SELECT ID FROM fits) OR boostedID NOT IN (SELECT ID FROM fits)")
|
||||
@@ -1,9 +0,0 @@
|
||||
"""
|
||||
Migration 27
|
||||
|
||||
- Resets all alpha clones to 1 (CCP consolidated all alphas into one skillset)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute("UPDATE characters SET alphaCloneID = 1 WHERE alphaCloneID IS NOT NULL")
|
||||
@@ -1,18 +0,0 @@
|
||||
"""
|
||||
Migration 28
|
||||
|
||||
- adds baseItemID and mutaplasmidID to modules table
|
||||
"""
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT baseItemID FROM modules LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE modules ADD COLUMN baseItemID INT;")
|
||||
|
||||
try:
|
||||
saveddata_engine.execute("SELECT mutaplasmidID FROM modules LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE modules ADD COLUMN mutaplasmidID INT;")
|
||||
@@ -1,18 +0,0 @@
|
||||
"""
|
||||
Migration 29
|
||||
|
||||
- adds spoolType and spoolAmount to modules table
|
||||
"""
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT spoolType FROM modules LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE modules ADD COLUMN spoolType INT;")
|
||||
|
||||
try:
|
||||
saveddata_engine.execute("SELECT spoolAmount FROM modules LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE modules ADD COLUMN spoolAmount FLOAT;")
|
||||
@@ -6,7 +6,6 @@ Migration 3
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT modeID FROM fits LIMIT 1")
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"""
|
||||
Migration 30
|
||||
|
||||
- changes to prices table
|
||||
"""
|
||||
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT status FROM prices LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
# Just drop table, table will be re-created by sqlalchemy and
|
||||
# data will be re-fetched
|
||||
saveddata_engine.execute("DROP TABLE prices;")
|
||||
@@ -1,15 +0,0 @@
|
||||
"""
|
||||
Migration 31
|
||||
|
||||
- added fit system security column
|
||||
"""
|
||||
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT systemSecurity FROM fits LIMIT 1")
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN systemSecurity INT")
|
||||
@@ -1,16 +0,0 @@
|
||||
"""
|
||||
Migration 32
|
||||
|
||||
- added speed, sig and radius columns to targetResists table
|
||||
"""
|
||||
|
||||
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
for column in ('maxVelocity', 'signatureRadius', 'radius'):
|
||||
try:
|
||||
saveddata_engine.execute("SELECT {} FROM targetResists LIMIT 1;".format(column))
|
||||
except sqlalchemy.exc.DatabaseError:
|
||||
saveddata_engine.execute("ALTER TABLE targetResists ADD COLUMN {} FLOAT;".format(column))
|
||||
@@ -1,30 +0,0 @@
|
||||
"""
|
||||
Migration 33
|
||||
|
||||
Allow use of floats in damage pattern values
|
||||
"""
|
||||
|
||||
tmpTable = """
|
||||
CREATE TABLE "damagePatternsTemp" (
|
||||
"ID" INTEGER NOT NULL,
|
||||
"name" VARCHAR,
|
||||
"emAmount" FLOAT,
|
||||
"thermalAmount" FLOAT,
|
||||
"kineticAmount" FLOAT,
|
||||
"explosiveAmount" FLOAT,
|
||||
"ownerID" INTEGER,
|
||||
"created" DATETIME,
|
||||
"modified" DATETIME,
|
||||
PRIMARY KEY ("ID"),
|
||||
FOREIGN KEY("ownerID") REFERENCES users ("ID")
|
||||
)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute(tmpTable)
|
||||
saveddata_engine.execute(
|
||||
'INSERT INTO damagePatternsTemp (ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID, created, modified) '
|
||||
'SELECT ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID, created, modified FROM damagePatterns')
|
||||
saveddata_engine.execute('DROP TABLE damagePatterns')
|
||||
saveddata_engine.execute('ALTER TABLE damagePatternsTemp RENAME TO damagePatterns')
|
||||
@@ -10,65 +10,66 @@ Migration 4
|
||||
and output of itemDiff.py
|
||||
"""
|
||||
|
||||
|
||||
CONVERSIONS = {
|
||||
506 : ( # 'Basic' Capacitor Power Relay
|
||||
506: ( # 'Basic' Capacitor Power Relay
|
||||
8205, # Alpha Reactor Control: Capacitor Power Relay
|
||||
8209, # Marked Generator Refitting: Capacitor Power Relay
|
||||
8203, # Partial Power Plant Manager: Capacity Power Relay
|
||||
8207, # Type-E Power Core Modification: Capacitor Power Relay
|
||||
),
|
||||
8177 : ( # Mark I Compact Capacitor Power Relay
|
||||
8177: ( # Mark I Compact Capacitor Power Relay
|
||||
8173, # Beta Reactor Control: Capacitor Power Relay I
|
||||
),
|
||||
8175 : ( # Type-D Restrained Capacitor Power Relay
|
||||
8175: ( # Type-D Restrained Capacitor Power Relay
|
||||
8171, # Local Power Plant Manager: Capacity Power Relay I
|
||||
),
|
||||
|
||||
421 : ( # 'Basic' Capacitor Recharger
|
||||
421: ( # 'Basic' Capacitor Recharger
|
||||
4425, # AGM Capacitor Charge Array,
|
||||
4421, # F-a10 Buffer Capacitor Regenerator
|
||||
4423, # Industrial Capacitor Recharger
|
||||
4427, # Secondary Parallel Link-Capacitor
|
||||
),
|
||||
4435 : ( # Eutectic Compact Cap Recharger
|
||||
4435: ( # Eutectic Compact Cap Recharger
|
||||
4433, # Barton Reactor Capacitor Recharger I
|
||||
4431, # F-b10 Nominal Capacitor Regenerator
|
||||
4437, # Fixed Parallel Link-Capacitor I
|
||||
),
|
||||
|
||||
1315 : ( # 'Basic' Expanded Cargohold
|
||||
1315: ( # 'Basic' Expanded Cargohold
|
||||
5483, # Alpha Hull Mod Expanded Cargo
|
||||
5479, # Marked Modified SS Expanded Cargo
|
||||
5481, # Partial Hull Conversion Expanded Cargo
|
||||
5485, # Type-E Altered SS Expanded Cargo
|
||||
),
|
||||
5493 : ( # Type-D Restrained Expanded Cargo
|
||||
5493: ( # Type-D Restrained Expanded Cargo
|
||||
5491, # Beta Hull Mod Expanded Cargo
|
||||
5489, # Local Hull Conversion Expanded Cargo I
|
||||
5487, # Mark I Modified SS Expanded Cargo
|
||||
),
|
||||
|
||||
1401 : ( # 'Basic' Inertial Stabilizers
|
||||
1401: ( # 'Basic' Inertial Stabilizers
|
||||
5523, # Alpha Hull Mod Inertial Stabilizers
|
||||
5521, # Partial Hull Conversion Inertial Stabilizers
|
||||
5525, # Type-E Altered SS Inertial Stabilizers
|
||||
),
|
||||
5533 : ( # Type-D Restrained Inertial Stabilizers
|
||||
5533: ( # Type-D Restrained Inertial Stabilizers
|
||||
5531, # Beta Hull Mod Inertial Stabilizers
|
||||
5529, # Local Hull Conversion Inertial Stabilizers I
|
||||
5527, # Mark I Modified SS Inertial Stabilizers
|
||||
5519, # Marked Modified SS Inertial Stabilizers
|
||||
),
|
||||
|
||||
5239 : ( # EP-S Gaussian Scoped Mining Laser
|
||||
5239: ( # EP-S Gaussian Scoped Mining Laser
|
||||
5241, # Dual Diode Mining Laser I
|
||||
),
|
||||
5233 : ( # Single Diode Basic Mining Laser
|
||||
5233: ( # Single Diode Basic Mining Laser
|
||||
5231, # EP-R Argon Ion Basic Excavation Pulse
|
||||
5237, # Rubin Basic Particle Bore Stream
|
||||
5235, # Xenon Basic Drilling Beam
|
||||
),
|
||||
5245 : ( # Particle Bore Compact Mining Laser
|
||||
5245: ( # Particle Bore Compact Mining Laser
|
||||
5243, # XeCl Drilling Beam I
|
||||
),
|
||||
|
||||
@@ -79,63 +80,62 @@ CONVERSIONS = {
|
||||
22609, # Erin Mining Laser Upgrade
|
||||
),
|
||||
|
||||
1242 : ( # 'Basic' Nanofiber Internal Structure
|
||||
1242: ( # 'Basic' Nanofiber Internal Structure
|
||||
5591, # Alpha Hull Mod Nanofiber Structure
|
||||
5595, # Marked Modified SS Nanofiber Structure
|
||||
5559, # Partial Hull Conversion Nanofiber Structure
|
||||
5593, # Type-E Altered SS Nanofiber Structure
|
||||
),
|
||||
5599 : ( # Type-D Restrained Nanofiber Structure
|
||||
5599: ( # Type-D Restrained Nanofiber Structure
|
||||
5597, # Beta Hull Mod Nanofiber Structure
|
||||
5561, # Local Hull Conversion Nanofiber Structure I
|
||||
5601, # Mark I Modified SS Nanofiber Structure
|
||||
),
|
||||
|
||||
1192 : ( # 'Basic' Overdrive Injector System
|
||||
1192: ( # 'Basic' Overdrive Injector System
|
||||
5613, # Alpha Hull Mod Overdrive Injector
|
||||
5617, # Marked Modified SS Overdrive Injector
|
||||
5611, # Partial Hull Conversion Overdrive Injector
|
||||
5615, # Type-E Altered SS Overdrive Injector
|
||||
),
|
||||
5631 : ( # Type-D Restrained Overdrive Injector
|
||||
5631: ( # Type-D Restrained Overdrive Injector
|
||||
5629, # Beta Hull Mod Overdrive Injector
|
||||
5627, # Local Hull Conversion Overdrive Injector I
|
||||
5633, # Mark I Modified SS Overdrive Injector
|
||||
),
|
||||
|
||||
1537 : ( # 'Basic' Power Diagnostic System
|
||||
1537: ( # 'Basic' Power Diagnostic System
|
||||
8213, # Alpha Reactor Control: Diagnostic System
|
||||
8217, # Marked Generator Refitting: Diagnostic System
|
||||
8211, # Partial Power Plant Manager: Diagnostic System
|
||||
8215, # Type-E Power Core Modification: Diagnostic System
|
||||
8255, # Type-E Power Core Modification: Reaction Control
|
||||
),
|
||||
8225 : ( # Mark I Compact Power Diagnostic System
|
||||
8225: ( # Mark I Compact Power Diagnostic System
|
||||
8221, # Beta Reactor Control: Diagnostic System I
|
||||
8219, # Local Power Plant Manager: Diagnostic System I
|
||||
8223, # Type-D Power Core Modification: Diagnostic System
|
||||
),
|
||||
|
||||
1240 : ( # 'Basic' Reinforced Bulkheads
|
||||
1240: ( # 'Basic' Reinforced Bulkheads
|
||||
5677, # Alpha Hull Mod Reinforced Bulkheads
|
||||
5681, # Marked Modified SS Reinforced Bulkheads
|
||||
5675, # Partial Hull Conversion Reinforced Bulkheads
|
||||
5679, # Type-E Altered SS Reinforced Bulkheads
|
||||
),
|
||||
5649 : ( # Mark I Compact Reinforced Bulkheads
|
||||
5649: ( # Mark I Compact Reinforced Bulkheads
|
||||
5645, # Beta Hull Mod Reinforced Bulkheads
|
||||
),
|
||||
5647 : ( # Type-D Restrained Reinforced Bulkheads
|
||||
5647: ( # Type-D Restrained Reinforced Bulkheads
|
||||
5643, # Local Hull Conversion Reinforced Bulkheads I
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for replacement_item, list in CONVERSIONS.iteritems():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?', (replacement_item, retired_item))
|
||||
|
||||
|
||||
@@ -4,6 +4,5 @@ Migration 5
|
||||
Simply deletes damage profiles with a blank name. See GH issue #256
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute('DELETE FROM damagePatterns WHERE name LIKE ?', ("",))
|
||||
|
||||
@@ -4,8 +4,6 @@ Migration 6
|
||||
Overwrites damage profile 0 to reset bad uniform values (bad values set with bug)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute('DELETE FROM damagePatterns WHERE name LIKE ? OR ID LIKE ?', ("Uniform", "1"))
|
||||
saveddata_engine.execute('INSERT INTO damagePatterns (ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID) VALUES (?, ?, ?, ?, ?, ?, ?)',
|
||||
(1, "Uniform", 25, 25, 25, 25, None))
|
||||
saveddata_engine.execute('INSERT INTO damagePatterns VALUES (?, ?, ?, ?, ?, ?, ?)', (1, "Uniform", 25, 25, 25, 25, None))
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
"""
|
||||
Migration 7
|
||||
|
||||
- Converts Scorpion Ishukone Watch to Scorpion
|
||||
|
||||
Mosaic introduced a proper skinning system, and the Ishukone Scorp
|
||||
was the only ship that had been presented as a stand-alone ship in
|
||||
Pyfa.
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
640: ( # Scorpion
|
||||
4005, # Scorpion Ishukone Watch
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert ships
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "fits" SET "shipID" = ? WHERE "shipID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,85 +0,0 @@
|
||||
"""
|
||||
Migration 8
|
||||
|
||||
- Converts modules based on Carnyx Module Tiericide
|
||||
Some modules have been unpublished (and unpublished module attributes are removed
|
||||
from database), which causes pyfa to crash. We therefore replace these
|
||||
modules with their new replacements
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
8529 : ( # Large F-S9 Regolith Compact Shield Extender
|
||||
8409, # Large Subordinate Screen Stabilizer I
|
||||
),
|
||||
8419 : ( # Large Azeotropic Restrained Shield Extender
|
||||
8489, # Large Supplemental Barrier Emitter I
|
||||
),
|
||||
8517 : ( # Medium F-S9 Regolith Compact Shield Extender
|
||||
8397, # Medium Subordinate Screen Stabilizer I
|
||||
),
|
||||
8433 : ( # Medium Azeotropic Restrained Shield Extender
|
||||
8477, # Medium Supplemental Barrier Emitter I
|
||||
),
|
||||
20627: ( # Small 'Trapper' Shield Extender
|
||||
8437, # Micro Azeotropic Ward Salubrity I
|
||||
8505, # Micro F-S9 Regolith Shield Induction
|
||||
3849, # Micro Shield Extender I
|
||||
3851, # Micro Shield Extender II
|
||||
8387, # Micro Subordinate Screen Stabilizer I
|
||||
8465, # Micro Supplemental Barrier Emitter I
|
||||
),
|
||||
8521 : ( # Small F-S9 Regolith Compact Shield Extender
|
||||
8401, # Small Subordinate Screen Stabilizer I
|
||||
),
|
||||
8427 : ( # Small Azeotropic Restrained Shield Extender
|
||||
8481, # Small Supplemental Barrier Emitter I
|
||||
),
|
||||
11343: ( # 100mm Crystalline Carbonide Restrained Plates
|
||||
11345, # 100mm Reinforced Nanofiber Plates I
|
||||
),
|
||||
11341: ( # 100mm Rolled Tungsten Compact Plates
|
||||
11339, # 100mm Reinforced Titanium Plates I
|
||||
),
|
||||
11327: ( # 1600mm Crystalline Carbonide Restrained Plates
|
||||
11329, # 1600mm Reinforced Nanofiber Plates I
|
||||
),
|
||||
11325: ( # 1600mm Rolled Tungsten Compact Plates
|
||||
11323, # 1600mm Reinforced Titanium Plates I
|
||||
),
|
||||
11351: ( # 200mm Crystalline Carbonide Restrained Plates
|
||||
11353, # 200mm Reinforced Nanofiber Plates I
|
||||
),
|
||||
11349: ( # 200mm Rolled Tungsten Compact Plates
|
||||
11347, # 200mm Reinforced Titanium Plates I
|
||||
),
|
||||
11311: ( # 400mm Crystalline Carbonide Restrained Plates
|
||||
11313, # 400mm Reinforced Nanofiber Plates I
|
||||
),
|
||||
11309: ( # 400mm Rolled Tungsten Compact Plates
|
||||
11307, # 400mm Reinforced Titanium Plates I
|
||||
),
|
||||
23791: ( # 'Citadella' 100mm Steel Plates
|
||||
11335, # 50mm Reinforced Crystalline Carbonide Plates I
|
||||
11337, # 50mm Reinforced Nanofiber Plates I
|
||||
11333, # 50mm Reinforced Rolled Tungsten Plates I
|
||||
11291, # 50mm Reinforced Steel Plates I
|
||||
20343, # 50mm Reinforced Steel Plates II
|
||||
11331, # 50mm Reinforced Titanium Plates I
|
||||
),
|
||||
11319: ( # 800mm Crystalline Carbonide Restrained Plates
|
||||
11321, # 800mm Reinforced Nanofiber Plates I
|
||||
),
|
||||
11317: ( # 800mm Rolled Tungsten Compact Plates
|
||||
11315, # 800mm Reinforced Titanium Plates I
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
saveddata_engine.execute('UPDATE "cargo" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
@@ -1,25 +0,0 @@
|
||||
"""
|
||||
Migration 9
|
||||
|
||||
Effectively drops UNIQUE constraint from boosters table. SQLite does not support
|
||||
this, so we have to copy the table to the updated schema and then rename it
|
||||
"""
|
||||
|
||||
tmpTable = """
|
||||
CREATE TABLE boostersTemp (
|
||||
'ID' INTEGER NOT NULL,
|
||||
'itemID' INTEGER,
|
||||
'fitID' INTEGER NOT NULL,
|
||||
'active' BOOLEAN,
|
||||
PRIMARY KEY(ID),
|
||||
FOREIGN KEY('fitID') REFERENCES fits ('ID')
|
||||
)
|
||||
"""
|
||||
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute(tmpTable)
|
||||
saveddata_engine.execute(
|
||||
"INSERT INTO boostersTemp (ID, itemID, fitID, active) SELECT ID, itemID, fitID, active FROM boosters")
|
||||
saveddata_engine.execute("DROP TABLE boosters")
|
||||
saveddata_engine.execute("ALTER TABLE boostersTemp RENAME TO boosters")
|
||||
@@ -1,18 +1,3 @@
|
||||
__all__ = [
|
||||
"character",
|
||||
"fit",
|
||||
"mutator",
|
||||
"module",
|
||||
"user",
|
||||
"skill",
|
||||
"price",
|
||||
"booster",
|
||||
"drone",
|
||||
"implant",
|
||||
"damagePattern",
|
||||
"miscData",
|
||||
"targetProfile",
|
||||
"override",
|
||||
"implantSet",
|
||||
"loadDefaultDatabaseValues"
|
||||
]
|
||||
__all__ = ["character", "fit", "module", "user", "skill", "price",
|
||||
"booster", "drone", "implant", "fleet", "damagePattern",
|
||||
"miscData", "targetResists"]
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,41 +15,33 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, ForeignKey, Integer, Boolean, DateTime
|
||||
from sqlalchemy import Table, Column, ForeignKey, Integer, UniqueConstraint, Boolean
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.booster import Booster
|
||||
from eos.saveddata.boosterSideEffect import BoosterSideEffect
|
||||
from eos.types import Booster
|
||||
|
||||
boosters_table = Table("boosters", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("ID", Integer, primary_key = True),
|
||||
Column("itemID", Integer),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable = False),
|
||||
Column("active", Boolean),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
)
|
||||
|
||||
|
||||
booster_side_effect_table = Table("boosterSideEffects", saveddata_meta,
|
||||
Column("boosterID", Integer, ForeignKey("boosters.ID"), primary_key=True, index=True),
|
||||
Column("effectID", Integer, nullable=False, primary_key=True),
|
||||
Column("active", Boolean, default=False)
|
||||
)
|
||||
UniqueConstraint("itemID", "fitID"))
|
||||
|
||||
activeSideEffects_table = Table("boostersActiveSideEffects", saveddata_meta,
|
||||
Column("boosterID", ForeignKey("boosters.ID"), primary_key = True),
|
||||
Column("effectID", Integer, primary_key = True))
|
||||
|
||||
class ActiveSideEffectsDummy(object):
|
||||
def __init__(self, effectID):
|
||||
self.effectID = effectID
|
||||
|
||||
|
||||
mapper(ActiveSideEffectsDummy, activeSideEffects_table)
|
||||
mapper(Booster, boosters_table,
|
||||
properties={
|
||||
"_Booster__sideEffects": relation(
|
||||
BoosterSideEffect,
|
||||
backref="booster",
|
||||
cascade='all, delete, delete-orphan'),
|
||||
}
|
||||
)
|
||||
properties = {"_Booster__activeSideEffectDummies" : relation(ActiveSideEffectsDummy)})
|
||||
|
||||
|
||||
mapper(BoosterSideEffect, booster_side_effect_table)
|
||||
Booster._Booster__activeSideEffectIDs = association_proxy("_Booster__activeSideEffectDummies", "effectID")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,27 +15,18 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.cargo import Cargo
|
||||
from eos.saveddata.fit import Fit
|
||||
from eos.types import Cargo
|
||||
|
||||
cargo_table = Table("cargo", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
)
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable = False, index = True),
|
||||
Column("itemID", Integer, nullable = False),
|
||||
Column("amount", Integer, nullable = False))
|
||||
|
||||
mapper(Cargo, cargo_table,
|
||||
properties={
|
||||
"owner": relation(Fit)
|
||||
}
|
||||
)
|
||||
mapper(Cargo, cargo_table)
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,77 +15,30 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime, Float, UniqueConstraint
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy.orm import relation, mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.db.saveddata.implant import charImplants_table
|
||||
from eos.effectHandlerHelpers import HandledImplantList, HandledSsoCharacterList
|
||||
from eos.saveddata.implant import Implant
|
||||
from eos.saveddata.user import User
|
||||
from eos.saveddata.character import Character, Skill
|
||||
from eos.saveddata.ssocharacter import SsoCharacter
|
||||
from eos.types import Character, User, Skill, Implant
|
||||
from eos.effectHandlerHelpers import HandledImplantBoosterList
|
||||
|
||||
characters_table = Table("characters", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("name", String, nullable=False),
|
||||
Column("ID", Integer, primary_key = True),
|
||||
Column("name", String, nullable = False),
|
||||
Column("apiID", Integer),
|
||||
Column("apiKey", String),
|
||||
Column("defaultChar", Integer),
|
||||
Column("chars", String, nullable = True),
|
||||
Column("defaultLevel", Integer, nullable=True),
|
||||
Column("alphaCloneID", Integer, nullable=True),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
Column("secStatus", Float, nullable=True, default=0.0),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))
|
||||
|
||||
sso_table = Table("ssoCharacter", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("client", String, nullable=False),
|
||||
Column("characterID", Integer, nullable=False),
|
||||
Column("characterName", String, nullable=False),
|
||||
Column("refreshToken", String, nullable=False),
|
||||
Column("accessToken", String, nullable=False),
|
||||
Column("accessTokenExpires", DateTime, nullable=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
UniqueConstraint('client', 'characterID', name='uix_client_characterID'),
|
||||
UniqueConstraint('client', 'characterName', name='uix_client_characterName')
|
||||
)
|
||||
|
||||
sso_character_map_table = Table("ssoCharacterMap", saveddata_meta,
|
||||
Column("characterID", ForeignKey("characters.ID"), primary_key=True),
|
||||
Column("ssoCharacterID", ForeignKey("ssoCharacter.ID"), primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
mapper(SsoCharacter, sso_table)
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable = True))
|
||||
|
||||
mapper(Character, characters_table,
|
||||
properties={
|
||||
"_Character__alphaCloneID": characters_table.c.alphaCloneID,
|
||||
"savedName" : characters_table.c.name,
|
||||
"_Character__secStatus": characters_table.c.secStatus,
|
||||
"_Character__owner" : relation(
|
||||
User,
|
||||
backref="characters"),
|
||||
"_Character__skills" : relation(
|
||||
Skill,
|
||||
backref="character",
|
||||
cascade="all,delete-orphan"),
|
||||
"_Character__implants" : relation(
|
||||
Implant,
|
||||
collection_class=HandledImplantList,
|
||||
cascade='all,delete-orphan',
|
||||
backref='character',
|
||||
single_parent=True,
|
||||
primaryjoin=charImplants_table.c.charID == characters_table.c.ID,
|
||||
secondaryjoin=charImplants_table.c.implantID == Implant.ID,
|
||||
secondary=charImplants_table),
|
||||
"_Character__ssoCharacters" : relation(
|
||||
SsoCharacter,
|
||||
collection_class=HandledSsoCharacterList,
|
||||
backref='characters',
|
||||
secondary=sso_character_map_table)
|
||||
}
|
||||
)
|
||||
properties = {"_Character__owner" : relation(User, backref = "characters"),
|
||||
"_Character__skills" : relation(Skill, backref="character", cascade = "all,delete-orphan"),
|
||||
"_Character__implants" : relation(Implant, collection_class = HandledImplantBoosterList, cascade='all,delete-orphan', single_parent=True,
|
||||
primaryjoin = charImplants_table.c.charID == characters_table.c.ID,
|
||||
secondaryjoin = charImplants_table.c.implantID == Implant.ID,
|
||||
secondary = charImplants_table),})
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,25 +15,21 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String, DateTime
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy.orm import mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.damagePattern import DamagePattern
|
||||
from eos.types import DamagePattern
|
||||
|
||||
damagePatterns_table = Table("damagePatterns", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("ID", Integer, primary_key = True),
|
||||
Column("name", String),
|
||||
Column("emAmount", Float),
|
||||
Column("thermalAmount", Float),
|
||||
Column("kineticAmount", Float),
|
||||
Column("explosiveAmount", Float),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
Column("emAmount", Integer),
|
||||
Column("thermalAmount", Integer),
|
||||
Column("kineticAmount", Integer),
|
||||
Column("explosiveAmount", Integer),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True))
|
||||
|
||||
mapper(DamagePattern, damagePatterns_table)
|
||||
|
||||
@@ -1,239 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of pyfa.
|
||||
#
|
||||
# pyfa is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# pyfa is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy.exc import DatabaseError
|
||||
from logbook import Logger
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
|
||||
class DatabaseCleanup:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def ExecuteSQLQuery(saveddata_engine, query):
|
||||
try:
|
||||
results = saveddata_engine.execute(query)
|
||||
return results
|
||||
except DatabaseError:
|
||||
pyfalog.error("Failed to connect to database or error executing query:\n{0}", query)
|
||||
return None
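The cleanup methods that follow all share one shape: count the offending rows with a COUNT(*) AS num query, and if any are found, run a corrective statement and log how many rows it touched. A hypothetical shared helper, reusing the module's pyfalog and the ExecuteSQLQuery wrapper above, would look roughly like:

@staticmethod
def _count_then_fix(saveddata_engine, count_query, fix_query):
    # count_query is expected to return a single column aliased AS num.
    results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, count_query)
    if results is None:
        return
    row = results.first()
    if row and row['num']:
        fixed = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, fix_query)
        pyfalog.error("Database corruption found. Cleaning up {0} records.", fixed.rowcount)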
|
||||
|
||||
@staticmethod
|
||||
def OrphanedCharacterSkills(saveddata_engine):
|
||||
# Find orphaned character skills.
|
||||
# This solves an issue where the character doesn't exist, but skills for that character do.
|
||||
# See issue #917
|
||||
pyfalog.debug("Running database cleanup for character skills.")
|
||||
query = "SELECT COUNT(*) AS num FROM characterSkills WHERE characterID NOT IN (SELECT ID from characters)"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM characterSkills WHERE characterID NOT IN (SELECT ID from characters)"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitDamagePatterns(saveddata_engine):
|
||||
# Find orphaned damage patterns.
|
||||
# This solves an issue where the damage pattern doesn't exist, but fits reference the pattern.
|
||||
# See issue #777
|
||||
pyfalog.debug("Running database cleanup for orphaned damage patterns attached to fits.")
|
||||
query = "SELECT COUNT(*) AS num FROM fits WHERE damagePatternID NOT IN (SELECT ID FROM damagePatterns) OR damagePatternID IS NULL"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
# Get Uniform damage pattern ID
|
||||
uniform_query = "SELECT ID FROM damagePatterns WHERE name = 'Uniform'"
|
||||
uniform_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, uniform_query)
|
||||
|
||||
if uniform_results is None:
|
||||
return
|
||||
|
||||
rows = uniform_results.fetchall()
|
||||
|
||||
if len(rows) == 0:
|
||||
pyfalog.error("Missing uniform damage pattern.")
|
||||
elif len(rows) > 1:
|
||||
pyfalog.error("More than one uniform damage pattern found.")
|
||||
else:
|
||||
uniform_damage_pattern_id = rows[0]['ID']
|
||||
update_query = "UPDATE 'fits' SET 'damagePatternID' = {} " \
|
||||
"WHERE damagePatternID NOT IN (SELECT ID FROM damagePatterns) OR damagePatternID IS NULL".format(uniform_damage_pattern_id)
|
||||
update_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, update_query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", update_results.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitCharacterIDs(saveddata_engine):
|
||||
# Find orphaned character IDs. This solves an issue where the character doesn't exist, but fits still reference that character.
|
||||
pyfalog.debug("Running database cleanup for orphaned characters attached to fits.")
|
||||
query = "SELECT COUNT(*) AS num FROM fits WHERE characterID NOT IN (SELECT ID FROM characters) OR characterID IS NULL"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
# Get All 5 character ID
|
||||
all5_query = "SELECT ID FROM characters WHERE name = 'All 5'"
|
||||
all5_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, all5_query)
|
||||
|
||||
if all5_results is None:
|
||||
return
|
||||
|
||||
rows = all5_results.fetchall()
|
||||
|
||||
if len(rows) == 0:
|
||||
pyfalog.error("Missing 'All 5' character.")
|
||||
elif len(rows) > 1:
|
||||
pyfalog.error("More than one 'All 5' character found.")
|
||||
else:
|
||||
all5_id = rows[0]['ID']
|
||||
update_query = "UPDATE 'fits' SET 'characterID' = " + str(all5_id) + \
|
||||
" WHERE characterID not in (select ID from characters) OR characterID IS NULL"
|
||||
update_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, update_query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", update_results.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullDamagePatternNames(saveddata_engine):
|
||||
# Find damage patterns that are missing the name.
|
||||
# This solves an issue where the damage pattern ends up with a name that is null.
|
||||
# See issue #949
|
||||
pyfalog.debug("Running database cleanup for missing damage pattern names.")
|
||||
query = "SELECT COUNT(*) AS num FROM damagePatterns WHERE name IS NULL OR name = ''"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM damagePatterns WHERE name IS NULL OR name = ''"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullTargetResistNames(saveddata_engine):
|
||||
# Find target resists that are missing the name.
|
||||
# This solves an issue where the target resist ends up with a name that is null.
|
||||
# See issue #949
|
||||
pyfalog.debug("Running database cleanup for missing target resist names.")
|
||||
query = "SELECT COUNT(*) AS num FROM targetResists WHERE name IS NULL OR name = ''"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM targetResists WHERE name IS NULL OR name = ''"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitIDItemID(saveddata_engine):
|
||||
# Orphaned items that are missing the fit ID or item ID.
|
||||
# See issue #954
|
||||
for table in ['drones', 'cargo', 'fighters']:
|
||||
pyfalog.debug("Running database cleanup for orphaned {0} items.", table)
|
||||
query = "SELECT COUNT(*) AS num FROM {} WHERE itemID IS NULL OR itemID = '' or itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM {} WHERE itemID IS NULL OR itemID = '' or itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
for table in ['modules']:
|
||||
pyfalog.debug("Running database cleanup for orphaned {0} items.", table)
|
||||
query = "SELECT COUNT(*) AS num FROM {} WHERE itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM {} WHERE itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(table)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullDamageTargetPatternValues(saveddata_engine):
|
||||
# Find patterns that have null values
|
||||
# See issue #954
|
||||
for profileType in ['damagePatterns', 'targetResists']:
|
||||
for damageType in ['em', 'thermal', 'kinetic', 'explosive']:
|
||||
pyfalog.debug("Running database cleanup for null {0} values. ({1})", profileType, damageType)
|
||||
query = "SELECT COUNT(*) AS num FROM {0} WHERE {1}Amount IS NULL OR {1}Amount = ''".format(profileType,
|
||||
damageType)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "UPDATE '{0}' SET '{1}Amount' = '0' WHERE {1}Amount IS NULL OR {1}Amount = ''".format(profileType,
|
||||
damageType)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def DuplicateSelectedAmmoName(saveddata_engine):
|
||||
# Orphaned items that are missing the fit ID or item ID.
|
||||
# See issue #954
|
||||
pyfalog.debug("Running database cleanup for duplicated selected ammo profiles.")
|
||||
query = "SELECT COUNT(*) AS num FROM damagePatterns WHERE name = 'Selected Ammo'"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num'] > 1:
|
||||
query = "DELETE FROM damagePatterns WHERE name = 'Selected Ammo'"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,29 +15,19 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
#===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
|
||||
from sqlalchemy.orm import mapper
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.drone import Drone
|
||||
from eos.saveddata.fit import Fit
|
||||
from eos.types import Drone
|
||||
|
||||
drones_table = Table("drones", saveddata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("amountActive", Integer, nullable=False),
|
||||
Column("projected", Boolean, default=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable = False, index = True),
|
||||
Column("itemID", Integer, nullable = False),
|
||||
Column("amount", Integer, nullable = False),
|
||||
Column("amountActive", Integer, nullable = False),
|
||||
Column("projected", Boolean, default = False))
|
||||
|
||||
mapper(Drone, drones_table,
|
||||
properties={
|
||||
"owner": relation(Fit)
|
||||
}
|
||||
)
|
||||
mapper(Drone, drones_table)
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.saveddata.fighterAbility import FighterAbility
|
||||
from eos.saveddata.fighter import Fighter
|
||||
from eos.saveddata.fit import Fit
|
||||
|
||||
fighters_table = Table("fighters", saveddata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("active", Boolean, nullable=True),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("projected", Boolean, default=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
fighter_abilities_table = Table("fightersAbilities", saveddata_meta,
|
||||
Column("groupID", Integer, ForeignKey("fighters.groupID"), primary_key=True,
|
||||
index=True),
|
||||
Column("effectID", Integer, nullable=False, primary_key=True),
|
||||
Column("active", Boolean, default=False))
|
||||
|
||||
mapper(Fighter, fighters_table,
|
||||
properties={
|
||||
"owner" : relation(Fit),
|
||||
"_amount" : fighters_table.c.amount,
|
||||
"_Fighter__abilities": relation(
|
||||
FighterAbility,
|
||||
backref="fighter",
|
||||
cascade='all, delete, delete-orphan'),
|
||||
})
|
||||
|
||||
mapper(FighterAbility, fighter_abilities_table)
|
||||
@@ -1,4 +1,4 @@
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
@@ -15,245 +15,63 @@
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
#===============================================================================
|
||||
|
||||
import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import mapper, reconstructor, relation, relationship
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, Boolean
|
||||
from sqlalchemy.orm import relation, mapper
|
||||
from sqlalchemy.sql import and_
|
||||
|
||||
from eos.db import saveddata_meta, saveddata_session
|
||||
from eos.db.saveddata.cargo import cargo_table
|
||||
from eos.db.saveddata.drone import drones_table
|
||||
from eos.db.saveddata.fighter import fighters_table
|
||||
from eos.db.saveddata.implant import fitImplants_table
|
||||
from eos.db import saveddata_meta
|
||||
from eos.db.saveddata.module import modules_table
|
||||
from eos.effectHandlerHelpers import HandledDroneCargoList, HandledImplantList, HandledBoosterList, HandledModuleList, HandledProjectedDroneList, HandledProjectedModList
|
||||
from eos.saveddata.booster import Booster
|
||||
from eos.saveddata.cargo import Cargo
|
||||
from eos.saveddata.character import Character
|
||||
from eos.saveddata.damagePattern import DamagePattern
|
||||
from eos.saveddata.drone import Drone
|
||||
from eos.saveddata.fighter import Fighter
|
||||
from eos.saveddata.fit import Fit as es_Fit
|
||||
from eos.saveddata.implant import Implant
|
||||
from eos.saveddata.module import Module
|
||||
from eos.saveddata.targetProfile import TargetProfile
|
||||
from eos.saveddata.user import User
|
||||
|
||||
from eos.db.saveddata.drone import drones_table
|
||||
from eos.db.saveddata.cargo import cargo_table
|
||||
from eos.db.saveddata.implant import fitImplants_table
|
||||
from eos.types import Fit, Module, User, Booster, Drone, Cargo, Implant, Character, DamagePattern, TargetResists
|
||||
from eos.effectHandlerHelpers import HandledModuleList, HandledDroneList, \
|
||||
HandledImplantBoosterList, HandledProjectedModList, HandledProjectedDroneList, \
|
||||
HandledProjectedFitList, HandledCargoList
|
||||
|
||||
fits_table = Table("fits", saveddata_meta,
    Column("ID", Integer, primary_key=True),
    Column("ownerID", ForeignKey("users.ID"), nullable=True, index=True),
    Column("shipID", Integer, nullable=False, index=True),
    Column("name", String, nullable=False),
    Column("timestamp", Integer, nullable=False),
    Column("characterID", ForeignKey("characters.ID"), nullable=True),
    Column("damagePatternID", ForeignKey("damagePatterns.ID"), nullable=True),
    Column("booster", Boolean, nullable=False, index=True, default=0),
    Column("targetResistsID", ForeignKey("targetResists.ID"), nullable=True),
    Column("modeID", Integer, nullable=True),
    Column("implantLocation", Integer, nullable=False),
    Column("notes", String, nullable=True),
    Column("ignoreRestrictions", Boolean, default=0),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, default=datetime.datetime.now, onupdate=datetime.datetime.now),
    Column("systemSecurity", Integer, nullable=True)
)
    Column("ID", Integer, primary_key = True),
    Column("ownerID", ForeignKey("users.ID"), nullable = True, index = True),
    Column("shipID", Integer, nullable = False, index = True),
    Column("name", String, nullable = False),
    Column("timestamp", Integer, nullable = False),
    Column("characterID", ForeignKey("characters.ID"), nullable = True),
    Column("damagePatternID", ForeignKey("damagePatterns.ID"), nullable=True),
    Column("booster", Boolean, nullable = False, index = True, default = 0),
    Column("targetResistsID", ForeignKey("targetResists.ID"), nullable=True),
    Column("modeID", Integer, nullable=True),
)

projectedFits_table = Table("projectedFits", saveddata_meta,
    Column("sourceID", ForeignKey("fits.ID"), primary_key=True),
    Column("victimID", ForeignKey("fits.ID"), primary_key=True),
    Column("amount", Integer, nullable=False, default=1),
    Column("active", Boolean, nullable=False, default=1),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

commandFits_table = Table("commandFits", saveddata_meta,
    Column("boosterID", ForeignKey("fits.ID"), primary_key=True),
    Column("boostedID", ForeignKey("fits.ID"), primary_key=True),
    Column("active", Boolean, nullable=False, default=1),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)


class ProjectedFit:
    def __init__(self, sourceID, source_fit, amount=1, active=True):
        self.sourceID = sourceID
        self.source_fit = source_fit
        self.active = active
        self.__amount = amount

    @reconstructor
    def init(self):
        if self.source_fit.isInvalid:
            # Very rare for this to happen, but be prepared for it
            saveddata_session.delete(self.source_fit)
            saveddata_session.flush()
            saveddata_session.refresh(self.victim_fit)

    # We have a series of setters and getters here just in case someone
    # downgrades and screws up the table with NULL values
    @property
    def amount(self):
        return self.__amount or 1

    @amount.setter
    def amount(self, amount):
        self.__amount = amount

    def __repr__(self):
        return "ProjectedFit(sourceID={}, victimID={}, amount={}, active={}) at {}".format(
            self.sourceID, self.victimID, self.amount, self.active, hex(id(self))
        )


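A minimal standalone illustration of the NULL-guarding property idiom used in ProjectedFit.amount above (editor's sketch, not pyfa code): a None left behind in the column after a downgrade falls back to the default of 1.

class _AmountDemo:
    def __init__(self, amount=1):
        self.__amount = amount

    @property
    def amount(self):
        # A NULL/None value coming back from the database falls back to 1
        return self.__amount or 1

    @amount.setter
    def amount(self, amount):
        self.__amount = amount


assert _AmountDemo(None).amount == 1
assert _AmountDemo(3).amount == 3

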
class CommandFit:
    def __init__(self, boosterID, booster_fit, active=True):
        self.boosterID = boosterID
        self.booster_fit = booster_fit
        self.active = active

    @reconstructor
    def init(self):
        if self.booster_fit.isInvalid:
            # Very rare for this to happen, but be prepared for it
            saveddata_session.delete(self.booster_fit)
            saveddata_session.flush()
            saveddata_session.refresh(self.boosted_fit)

    def __repr__(self):
        return "CommandFit(boosterID={}, boostedID={}, active={}) at {}".format(
            self.boosterID, self.boostedID, self.active, hex(id(self))
        )


es_Fit.projectedFitDict = association_proxy(
    "victimOf",  # look at the victimOf association...
    "source_fit",  # .. and return the source fits
    creator=lambda sourceID, source_fit: ProjectedFit(sourceID, source_fit)
)

es_Fit.commandFitDict = association_proxy(
    "boostedOf",  # look at the boostedOf association...
    "booster_fit",  # .. and return the booster fit
    creator=lambda boosterID, booster_fit: CommandFit(boosterID, booster_fit)
)


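The two proxies above expose the ProjectedFit/CommandFit association rows as plain {ID: Fit} dictionaries. Below is a self-contained sketch of the same association-proxy pattern, using illustrative names and a throwaway in-memory SQLite database rather than pyfa's models; SQLAlchemy 1.x style is assumed to match the classical mappings in this file.

# Editor's sketch (not part of this diff): the association-proxy pattern in miniature.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.orm.collections import attribute_mapped_collection

Base = declarative_base()


class DemoFit(Base):
    __tablename__ = "demo_fits"
    ID = Column(Integer, primary_key=True)
    name = Column(String)

    # {sourceID: DemoProjection} -- the raw association objects
    victimOf = relationship(
        "DemoProjection",
        primaryjoin="DemoFit.ID == DemoProjection.victimID",
        collection_class=attribute_mapped_collection("sourceID"),
        cascade="all, delete, delete-orphan")

    # {sourceID: DemoFit} -- writes go through the creator, reads skip the
    # association object, just like projectedFitDict/commandFitDict above
    projectedFitDict = association_proxy(
        "victimOf", "source_fit",
        creator=lambda sourceID, source_fit: DemoProjection(sourceID, source_fit))


class DemoProjection(Base):
    __tablename__ = "demo_projections"
    sourceID = Column(Integer, ForeignKey("demo_fits.ID"), primary_key=True)
    victimID = Column(Integer, ForeignKey("demo_fits.ID"), primary_key=True)

    source_fit = relationship("DemoFit", foreign_keys=[sourceID])

    def __init__(self, sourceID, source_fit):
        self.sourceID = sourceID
        self.source_fit = source_fit


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

source, victim = DemoFit(ID=1, name="source"), DemoFit(ID=2, name="victim")
session.add_all([source, victim])
victim.projectedFitDict[source.ID] = source  # the creator builds the association row
session.commit()
print({sourceID: fit.name for sourceID, fit in victim.projectedFitDict.items()})  # {1: 'source'}

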
# These relationships are broken out so that we can easily access it in the events stuff
# We sometimes don't want particular relationships to cause a fit modified update (eg: projecting
# a fit onto another would 'modify' both fits unless the following relationship is ignored)
projectedFitSourceRel = relationship(
    ProjectedFit,
    primaryjoin=projectedFits_table.c.sourceID == fits_table.c.ID,
    backref='source_fit',
    collection_class=attribute_mapped_collection('victimID'),
    cascade='all, delete, delete-orphan')


boostedOntoRel = relationship(
    CommandFit,
    primaryjoin=commandFits_table.c.boosterID == fits_table.c.ID,
    backref='booster_fit',
    collection_class=attribute_mapped_collection('boostedID'),
    cascade='all, delete, delete-orphan')

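One way these module-level relationship objects can be consumed, sketched by the editor as an assumption (this is not pyfa's actual event wiring): the fit-modified bookkeeping can walk the mapped collections and skip the two relationships singled out above. `markModified` is a hypothetical callback.

# Editor's sketch only; assumes es_Fit is mapped further below before this is called.
from sqlalchemy import event, inspect


def markModified(target, value, initiator):
    # Hypothetical hook: bump the owning fit's modified timestamp, trigger recalcs, etc.
    pass


def wireModifiedEvents(fitClass, ignoredRels=(projectedFitSourceRel, boostedOntoRel)):
    for rel in inspect(fitClass).relationships:
        if any(rel is ignored for ignored in ignoredRels) or not rel.uselist:
            # Skip scalar references, and skip the projection/boost links so that
            # projecting one fit onto another does not "modify" both fits.
            continue
        attr = getattr(fitClass, rel.key)
        event.listen(attr, "append", markModified)
        event.listen(attr, "remove", markModified)

# e.g. wireModifiedEvents(es_Fit)  # once the mapper below has been configured
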
mapper(es_Fit, fits_table,
    properties={
        "_Fit__modules": relation(
            Module,
            collection_class=HandledModuleList,
            primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == False), # noqa
            order_by=modules_table.c.position,
            cascade='all, delete, delete-orphan'),
        "_Fit__projectedModules": relation(
            Module,
            collection_class=HandledProjectedModList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == True)), # noqa
        "owner": relation(
            User,
            backref="fits"),
        "itemID": fits_table.c.shipID,
        "shipID": fits_table.c.shipID,
        "_Fit__boosters": relation(
            Booster,
            collection_class=HandledBoosterList,
            cascade='all, delete, delete-orphan',
            backref='owner',
            single_parent=True),
        "_Fit__drones": relation(
            Drone,
            collection_class=HandledDroneCargoList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == False)), # noqa
        "_Fit__fighters": relation(
            Fighter,
            collection_class=HandledDroneCargoList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == False)), # noqa
        "_Fit__cargo": relation(
            Cargo,
            collection_class=HandledDroneCargoList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(cargo_table.c.fitID == fits_table.c.ID)),
        "_Fit__projectedDrones": relation(
            Drone,
            collection_class=HandledProjectedDroneList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == True)), # noqa
        "_Fit__projectedFighters": relation(
            Fighter,
            collection_class=HandledProjectedDroneList,
            cascade='all, delete, delete-orphan',
            single_parent=True,
            primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == True)), # noqa
        "_Fit__implants": relation(
            Implant,
            collection_class=HandledImplantList,
            cascade='all, delete, delete-orphan',
            backref='owner',
            single_parent=True,
            primaryjoin=fitImplants_table.c.fitID == fits_table.c.ID,
            secondaryjoin=fitImplants_table.c.implantID == Implant.ID,
            secondary=fitImplants_table),
        "_Fit__character": relation(
            Character,
            backref="fits"),
        "_Fit__damagePattern": relation(DamagePattern),
        "_Fit__targetProfile": relation(TargetProfile),
        "projectedOnto": projectedFitSourceRel,
        "victimOf": relationship(
            ProjectedFit,
            primaryjoin=fits_table.c.ID == projectedFits_table.c.victimID,
            backref='victim_fit',
            collection_class=attribute_mapped_collection('sourceID'),
            cascade='all, delete, delete-orphan'),
        "boostedOnto": boostedOntoRel,
        "boostedOf": relationship(
            CommandFit,
            primaryjoin=fits_table.c.ID == commandFits_table.c.boostedID,
            backref='boosted_fit',
            collection_class=attribute_mapped_collection('boosterID'),
            cascade='all, delete, delete-orphan'),
    }
)

mapper(ProjectedFit, projectedFits_table,
    properties={
        "_ProjectedFit__amount": projectedFits_table.c.amount,
    }
)

mapper(CommandFit, commandFits_table)
Column("sourceID", ForeignKey("fits.ID"), primary_key = True),
|
||||
Column("victimID", ForeignKey("fits.ID"), primary_key = True),
|
||||
Column("amount", Integer))
|
||||
mapper(Fit, fits_table,
|
||||
properties = {"_Fit__modules" : relation(Module, collection_class = HandledModuleList,
|
||||
primaryjoin = and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == False),
|
||||
order_by = modules_table.c.position, cascade='all, delete, delete-orphan'),
|
||||
"_Fit__projectedModules" : relation(Module, collection_class = HandledProjectedModList, cascade='all, delete, delete-orphan', single_parent=True,
|
||||
primaryjoin = and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == True)),
|
||||
"owner" : relation(User, backref = "fits"),
|
||||
"_Fit__boosters" : relation(Booster, collection_class = HandledImplantBoosterList, cascade='all, delete, delete-orphan', single_parent=True),
|
||||
"_Fit__drones" : relation(Drone, collection_class = HandledDroneList, cascade='all, delete, delete-orphan', single_parent=True,
|
||||
primaryjoin = and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == False)),
|
||||
"_Fit__cargo" : relation(Cargo, collection_class = HandledCargoList, cascade='all, delete, delete-orphan', single_parent=True,
|
||||
primaryjoin = and_(cargo_table.c.fitID == fits_table.c.ID)),
|
||||
"_Fit__projectedDrones" : relation(Drone, collection_class = HandledProjectedDroneList, cascade='all, delete, delete-orphan', single_parent=True,
|
||||
primaryjoin = and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == True)),
|
||||
"_Fit__implants" : relation(Implant, collection_class = HandledImplantBoosterList, cascade='all, delete, delete-orphan', single_parent=True,
|
||||
primaryjoin = fitImplants_table.c.fitID == fits_table.c.ID,
|
||||
secondaryjoin = fitImplants_table.c.implantID == Implant.ID,
|
||||
secondary = fitImplants_table),
|
||||
"_Fit__character" : relation(Character, backref = "fits"),
|
||||
"_Fit__damagePattern" : relation(DamagePattern),
|
||||
"_Fit__targetResists" : relation(TargetResists),
|
||||
"_Fit__projectedFits" : relation(Fit,
|
||||
primaryjoin = projectedFits_table.c.victimID == fits_table.c.ID,
|
||||
secondaryjoin = fits_table.c.ID == projectedFits_table.c.sourceID,
|
||||
secondary = projectedFits_table,
|
||||
collection_class = HandledProjectedFitList)
|
||||
})
|
||||
|
||||
eos/db/saveddata/fleet.py (66 lines, normal file)
@@ -0,0 +1,66 @@
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================

from sqlalchemy import Table, Column, Integer, ForeignKey, String
from sqlalchemy.orm import mapper, relation

from eos.db import saveddata_meta
from eos.types import Fleet, Wing, Squad, Fit
from eos.db.saveddata.fit import fits_table

gangs_table = Table("gangs", saveddata_meta,
    Column("ID", Integer, primary_key = True),
    Column("leaderID", ForeignKey("fits.ID")),
    Column("boosterID", ForeignKey("fits.ID")),
    Column("name", String))

wings_table = Table("wings", saveddata_meta,
    Column("ID", Integer, primary_key = True),
    Column("gangID", ForeignKey("gangs.ID")),
    Column("boosterID", ForeignKey("fits.ID")),
    Column("leaderID", ForeignKey("fits.ID")))

squads_table = Table("squads", saveddata_meta,
    Column("ID", Integer, primary_key = True),
    Column("wingID", ForeignKey("wings.ID")),
    Column("leaderID", ForeignKey("fits.ID")),
    Column("boosterID", ForeignKey("fits.ID")))

squadmembers_table = Table("squadmembers", saveddata_meta,
    Column("squadID", ForeignKey("squads.ID"), primary_key = True),
    Column("memberID", ForeignKey("fits.ID"), primary_key = True))

mapper(Fleet, gangs_table,
    properties = {"wings" : relation(Wing, backref="gang"),
                  "leader" : relation(Fit, primaryjoin = gangs_table.c.leaderID == fits_table.c.ID),
                  "booster": relation(Fit, primaryjoin = gangs_table.c.boosterID == fits_table.c.ID)})

mapper(Wing, wings_table,
    properties = {"squads" : relation(Squad, backref="wing"),
                  "leader" : relation(Fit, primaryjoin = wings_table.c.leaderID == fits_table.c.ID),
                  "booster": relation(Fit, primaryjoin = wings_table.c.boosterID == fits_table.c.ID)})

mapper(Squad, squads_table,
    properties = {"leader" : relation(Fit, primaryjoin = squads_table.c.leaderID == fits_table.c.ID),
                  "booster" : relation(Fit, primaryjoin = squads_table.c.boosterID == fits_table.c.ID),
                  "members" : relation(Fit,
                      primaryjoin = squads_table.c.ID == squadmembers_table.c.squadID,
                      secondaryjoin = squadmembers_table.c.memberID == fits_table.c.ID,
                      secondary = squadmembers_table)})

@@ -1,4 +1,4 @@
# ===============================================================================
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
@@ -15,33 +15,25 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
#===============================================================================

from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.saveddata.implant import Implant
from eos.types import Implant

implants_table = Table("implants", saveddata_meta,
    Column("ID", Integer, primary_key=True),
    Column("itemID", Integer),
    Column("active", Boolean),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)
    Column("ID", Integer, primary_key = True),
    Column("itemID", Integer),
    Column("active", Boolean))

fitImplants_table = Table("fitImplants", saveddata_meta,
    Column("fitID", ForeignKey("fits.ID"), index=True),
    Column("implantID", ForeignKey("implants.ID"), primary_key=True))
    Column("fitID", ForeignKey("fits.ID"), index = True),
    Column("implantID", ForeignKey("implants.ID"), primary_key = True))

charImplants_table = Table("charImplants", saveddata_meta,
    Column("charID", ForeignKey("characters.ID"), index=True),
    Column("implantID", ForeignKey("implants.ID"), primary_key=True))

implantsSetMap_table = Table("implantSetMap", saveddata_meta,
    Column("setID", ForeignKey("implantSets.ID"), index=True),
    Column("implantID", ForeignKey("implants.ID"), primary_key=True))
    Column("charID", ForeignKey("characters.ID"), index = True),
    Column("implantID", ForeignKey("implants.ID"), primary_key = True))

mapper(Implant, implants_table)

@@ -1,49 +0,0 @@
# ===============================================================================
# Copyright (C) 2016 Ryan Holmes
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, String, DateTime
from sqlalchemy.orm import relation, mapper
import datetime

from eos.db import saveddata_meta
from eos.db.saveddata.implant import implantsSetMap_table
from eos.effectHandlerHelpers import HandledImplantList
from eos.saveddata.implant import Implant
from eos.saveddata.implantSet import ImplantSet

implant_set_table = Table("implantSets", saveddata_meta,
    Column("ID", Integer, primary_key=True),
    Column("name", String, nullable=False),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

mapper(ImplantSet, implant_set_table,
    properties={
        "_ImplantSet__implants": relation(
            Implant,
            collection_class=HandledImplantList,
            cascade='all, delete, delete-orphan',
            backref='set',
            single_parent=True,
            primaryjoin=implantsSetMap_table.c.setID == implant_set_table.c.ID,
            secondaryjoin=implantsSetMap_table.c.implantID == Implant.ID,
            secondary=implantsSetMap_table),
    }
)
@@ -1,223 +0,0 @@
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import eos.db
from eos.saveddata.damagePattern import DamagePattern as es_DamagePattern
from eos.saveddata.targetProfile import TargetProfile as es_TargetProfile


class ImportError(Exception):
    pass


class DefaultDatabaseValues:
    def __init__(self):
        pass

    instance = None

    @classmethod
    def importDamageProfileDefaults(cls):
        damageProfileList = [["Uniform", "25", "25", "25", "25"], ["[Generic]EM", "100", "0", "0", "0"],
            ["[Generic]Thermal", "0", "100", "0", "0"], ["[Generic]Kinetic", "0", "0", "100", "0"],
            ["[Generic]Explosive", "0", "0", "0", "100"],
            ["[NPC][Asteroid] Blood Raiders", "5067", "4214", "0", "0"],
            ["[Bombs]Electron Bomb", "6400", "0", "0", "0"],
            ["[Bombs]Scorch Bomb", "0", "6400", "0", "0"],
            ["[Bombs]Concussion Bomb", "0", "0", "6400", "0"],
            ["[Bombs]Shrapnel Bomb", "0", "0", "0", "6400"],
            ["[Frequency Crystals][T2] Conflagration", "61.6", "61.6", "0", "0"],
            ["[Frequency Crystals][T2] Scorch", "72", "16", "0", "0"],
            ["[Frequency Crystals][T2] Gleam", "56", "56", "0", "0"],
            ["[Frequency Crystals][T2] Aurora", "40", "24", "0", "0"],
            ["[Frequency Crystals]Multifrequency", "61.6", "44", "0", "0"],
            ["[Frequency Crystals]Gamma", "61.6", "35.2", "0", "0"],
            ["[Frequency Crystals]Xray", "52.8", "35.2", "0", "0"],
            ["[Frequency Crystals]Ultraviolet", "52.8", "26.4", "0", "0"],
            ["[Frequency Crystals]Standard", "44", "26.4", "0", "0"],
            ["[Frequency Crystals]Infrared", "44", "17.6", "0", "0"],
            ["[Frequency Crystals]Microwave", "35.2", "17.6", "0", "0"],
            ["[Frequency Crystals]Radio", "44", "0", "0", "0"],
            ["[Hybrid Charges][T2] Void", "0", "61.6", "61.6", "0"],
            ["[Hybrid Charges][T2] Null", "0", "48", "40", "0"],
            ["[Hybrid Charges][T2] Javelin", "0", "64", "48", "0"],
            ["[Hybrid Charges][T2] Spike", "0", "32", "32", "0"],
            ["[Hybrid Charges]Antimatter", "0", "48", "67.2", "0"],
            ["[Hybrid Charges]Plutonium", "0", "48", "57.6", "0"],
            ["[Hybrid Charges]Uranium", "0", "38.4", "57.6", "0"],
            ["[Hybrid Charges]Thorium", "0", "38.4", "48", "0"],
            ["[Hybrid Charges]Lead", "0", "28.8", "48", "0"],
            ["[Hybrid Charges]Iridium", "0", "28.8", "38.4", "0"],
            ["[Hybrid Charges]Tungsten", "0", "19.2", "38.4", "0"],
            ["[Hybrid Charges]Iron", "0", "19.2", "28.8", "0"],
            ["[Missiles]Mjolnir", "100", "0", "0", "0"],
            ["[Missiles]Inferno", "0", "100", "0", "0"],
            ["[Missiles]Scourge", "0", "0", "100", "0"],
            ["[Missiles]Nova", "0", "0", "0", "100"],
            ["[Missiles][Structure] Standup Missile", "100", "100", "100", "100"],
            ["[Projectile Ammo][T2] Hail", "0", "0", "26.4", "96.8"],
            ["[Projectile Ammo][T2] Barrage", "0", "0", "40", "48"],
            ["[Projectile Ammo][T2] Quake", "0", "0", "40", "72"],
            ["[Projectile Ammo][T2] Tremor", "0", "0", "24", "40"],
            ["[Projectile Ammo]EMP", "79.2", "0", "8.8", "17.6"],
            ["[Projectile Ammo]Phased Plasma", "0", "88", "17.6", "0"],
            ["[Projectile Ammo]Fusion", "0", "0", "17.6", "88"],
            ["[Projectile Ammo]Depleted Uranium", "0", "26.4", "17.6", "26.4"],
            ["[Projectile Ammo]Titanium Sabot", "0", "0", "52.8", "176"],
            ["[Projectile Ammo]Proton", "26.4", "0", "17.6", "0"],
            ["[Projectile Ammo]Carbonized Lead", "0", "0", "35.2", "8.8"],
            ["[Projectile Ammo]Nuclear", "0", "0", "8.8", "35.2"],
            # Different sizes of plasma do different damage, the values here are
            # average of proportions across sizes
            ["[Exotic Plasma][T2] Occult", "0", "55863", "0", "44137"],
            ["[Exotic Plasma][T2] Mystic", "0", "66319", "0", "33681"],
            ["[Exotic Plasma]Tetryon", "0", "69208", "0", "30792"],
            ["[Exotic Plasma]Baryon", "0", "59737", "0", "40263"],
            ["[Exotic Plasma]Meson", "0", "60519", "0", "39481"],
            ["[NPC][Burner] Cruor (Blood Raiders)", "90", "90", "0", "0"],
            ["[NPC][Burner] Dramiel (Angel)", "55", "0", "20", "96"],
            ["[NPC][Burner] Daredevil (Serpentis)", "0", "110", "154", "0"],
            ["[NPC][Burner] Succubus (Sanshas Nation)", "135", "30", "0", "0"],
            ["[NPC][Burner] Worm (Guristas)", "0", "0", "228", "0"],
            ["[NPC][Burner] Enyo", "0", "147", "147", "0"],
            ["[NPC][Burner] Hawk", "0", "0", "247", "0"],
            ["[NPC][Burner] Jaguar", "36", "0", "50", "182"],
            ["[NPC][Burner] Vengeance", "232", "0", "0", "0"],
            ["[NPC][Burner] Ashimmu (Blood Raiders)", "260", "100", "0", "0"],
            ["[NPC][Burner] Talos", "0", "413", "413", "0"],
            ["[NPC][Burner] Sentinel", "0", "75", "0", "90"],
            ["[NPC][Asteroid] Angel Cartel", "1838", "562", "2215", "3838"],
            ["[NPC][Deadspace] Angel Cartel", "369", "533", "1395", "3302"],
            ["[NPC][Deadspace] Blood Raiders", "6040", "5052", "10", "15"],
            ["[NPC][Asteroid] Guristas", "0", "1828", "7413", "0"],
            ["[NPC][Deadspace] Guristas", "0", "1531", "9680", "0"],
            ["[NPC][Asteroid] Rogue Drone", "394", "666", "1090", "1687"],
            ["[NPC][Deadspace] Rogue Drone", "276", "1071", "1069", "871"],
            ["[NPC][Asteroid] Sanshas Nation", "5586", "4112", "0", "0"],
            ["[NPC][Deadspace] Sanshas Nation", "3009", "2237", "0", "0"],
            ["[NPC][Asteroid] Serpentis", "0", "5373", "4813", "0"],
            ["[NPC][Deadspace] Serpentis", "0", "3110", "1929", "0"],
            ["[NPC][Mission] Amarr Empire", "4464", "3546", "97", "0"],
            ["[NPC][Mission] Caldari State", "0", "2139", "4867", "0"],
            ["[NPC][Mission] CONCORD", "336", "134", "212", "412"],
            ["[NPC][Mission] Gallente Federation", "9", "3712", "2758", "0"],
            ["[NPC][Mission] Khanid", "612", "483", "43", "6"],
            ["[NPC][Mission] Minmatar Republic", "1024", "388", "1655", "4285"],
            ["[NPC][Mission] Mordus Legion", "25", "262", "625", "0"],
            ["[NPC][Mission] Thukker", "0", "52", "10", "79"],
            ["[NPC][Other] Sleepers", "1472", "1472", "1384", "1384"],
            ["[NPC][Other] Sansha Incursion", "1682", "1347", "3678", "3678"]]

        for damageProfileRow in damageProfileList:
            name, em, therm, kin, exp = damageProfileRow
            damageProfile = eos.db.getDamagePattern(name)
            if damageProfile is None:
                damageProfile = es_DamagePattern(em, therm, kin, exp)
                damageProfile.name = name
                eos.db.save(damageProfile)

    @classmethod
    def importTargetProfileDefaults(cls):
        targetProfileList = [["Uniform (25%)", "0.25", "0.25", "0.25", "0.25"],
            ["Uniform (50%)", "0.50", "0.50", "0.50", "0.50"],
            ["Uniform (75%)", "0.75", "0.75", "0.75", "0.75"],
            ["Uniform (90%)", "0.90", "0.90", "0.90", "0.90"],
            ["[T1 Resist]Shield", "0.0", "0.20", "0.40", "0.50"],
            ["[T1 Resist]Armor", "0.50", "0.45", "0.25", "0.10"],
            ["[T1 Resist]Hull", "0.33", "0.33", "0.33", "0.33"],
            ["[T1 Resist]Shield (+T2 DCU)", "0.125", "0.30", "0.475", "0.562"],
            ["[T1 Resist]Armor (+T2 DCU)", "0.575", "0.532", "0.363", "0.235"],
            ["[T1 Resist]Hull (+T2 DCU)", "0.598", "0.598", "0.598", "0.598"],
            ["[T2 Resist]Amarr (Shield)", "0.0", "0.20", "0.70", "0.875"],
            ["[T2 Resist]Amarr (Armor)", "0.50", "0.35", "0.625", "0.80"],
            ["[T2 Resist]Caldari (Shield)", "0.20", "0.84", "0.76", "0.60"],
            ["[T2 Resist]Caldari (Armor)", "0.50", "0.8625", "0.625", "0.10"],
            ["[T2 Resist]Gallente (Shield)", "0.0", "0.60", "0.85", "0.50"],
            ["[T2 Resist]Gallente (Armor)", "0.50", "0.675", "0.8375", "0.10"],
            ["[T2 Resist]Minmatar (Shield)", "0.75", "0.60", "0.40", "0.50"],
            ["[T2 Resist]Minmatar (Armor)", "0.90", "0.675", "0.25", "0.10"],
            ["[NPC][Asteroid] Angel Cartel", "0.54", "0.42", "0.37", "0.32"],
            ["[NPC][Asteroid] Blood Raiders", "0.34", "0.39", "0.45", "0.52"],
            ["[NPC][Asteroid] Guristas", "0.55", "0.35", "0.3", "0.48"],
            ["[NPC][Asteroid] Rogue Drones", "0.35", "0.38", "0.44", "0.49"],
            ["[NPC][Asteroid] Sanshas Nation", "0.35", "0.4", "0.47", "0.53"],
            ["[NPC][Asteroid] Serpentis", "0.49", "0.38", "0.29", "0.51"],
            ["[NPC][Deadspace] Angel Cartel", "0.59", "0.48", "0.4", "0.32"],
            ["[NPC][Deadspace] Blood Raiders", "0.31", "0.39", "0.47", "0.56"],
            ["[NPC][Deadspace] Guristas", "0.57", "0.39", "0.31", "0.5"],
            ["[NPC][Deadspace] Rogue Drones", "0.42", "0.42", "0.47", "0.49"],
            ["[NPC][Deadspace] Sanshas Nation", "0.31", "0.39", "0.47", "0.56"],
            ["[NPC][Deadspace] Serpentis", "0.49", "0.38", "0.29", "0.56"],
            ["[NPC][Mission] Amarr Empire", "0.34", "0.38", "0.42", "0.46"],
            ["[NPC][Mission] Caldari State", "0.51", "0.38", "0.3", "0.51"],
            ["[NPC][Mission] CONCORD", "0.47", "0.46", "0.47", "0.47"],
            ["[NPC][Mission] Gallente Federation", "0.51", "0.38", "0.31", "0.52"],
            ["[NPC][Mission] Khanid", "0.51", "0.42", "0.36", "0.4"],
            ["[NPC][Mission] Minmatar Republic", "0.51", "0.46", "0.41", "0.35"],
            ["[NPC][Mission] Mordus Legion", "0.32", "0.48", "0.4", "0.62"],
            ["[NPC][Other] Sleeper", "0.61", "0.61", "0.61", "0.61"],
            ["[NPC][Other] Sansha Incursion", "0.65", "0.63", "0.64", "0.65"],
            ["[NPC][Burner] Cruor (Blood Raiders)", "0.8", "0.73", "0.69", "0.67"],
            ["[NPC][Burner] Dramiel (Angel)", "0.35", "0.48", "0.61", "0.68"],
            ["[NPC][Burner] Daredevil (Serpentis)", "0.69", "0.59", "0.59", "0.43"],
            ["[NPC][Burner] Succubus (Sanshas Nation)", "0.35", "0.48", "0.61", "0.68"],
            ["[NPC][Burner] Worm (Guristas)", "0.48", "0.58", "0.69", "0.74"],
            ["[NPC][Burner] Enyo", "0.58", "0.72", "0.86", "0.24"],
            ["[NPC][Burner] Hawk", "0.3", "0.86", "0.79", "0.65"],
            ["[NPC][Burner] Jaguar", "0.78", "0.65", "0.48", "0.56"],
            ["[NPC][Burner] Vengeance", "0.66", "0.56", "0.75", "0.86"],
            ["[NPC][Burner] Ashimmu (Blood Raiders)", "0.8", "0.76", "0.68", "0.7"],
            ["[NPC][Burner] Talos", "0.68", "0.59", "0.59", "0.43"],
            ["[NPC][Burner] Sentinel", "0.58", "0.45", "0.52", "0.66"]]

        for targetProfileRow in targetProfileList:
            name = targetProfileRow[0]
            em = targetProfileRow[1]
            therm = targetProfileRow[2]
            kin = targetProfileRow[3]
            exp = targetProfileRow[4]
            try:
                maxVel = targetProfileRow[5]
            except IndexError:
                maxVel = None
            try:
                sigRad = targetProfileRow[6]
            except IndexError:
                sigRad = None
            try:
                radius = targetProfileRow[7]
            except IndexError:
                radius = None
            targetProfile = eos.db.getTargetProfile(name)
            if targetProfile is None:
                targetProfile = es_TargetProfile(em, therm, kin, exp, maxVel, sigRad, radius)
                targetProfile.name = name
                eos.db.save(targetProfile)

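    # Editor's note (illustrative, not pyfa code): the three try/except blocks
    # above only pad the optional trailing columns (max velocity, signature
    # radius, radius); the same unpacking could be written with slicing, e.g.:
    #
    #     name, em, therm, kin, exp = targetProfileRow[:5]
    #     maxVel, sigRad, radius = (list(targetProfileRow[5:]) + [None] * 3)[:3]
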
    @classmethod
    def importRequiredDefaults(cls):
        damageProfileList = [["Uniform", "25", "25", "25", "25"]]

        for damageProfileRow in damageProfileList:
            name, em, therm, kin, exp = damageProfileRow
            damageProfile = eos.db.getDamagePattern(name)
            if damageProfile is None:
                damageProfile = es_DamagePattern(em, therm, kin, exp)
                damageProfile.name = name
                eos.db.save(damageProfile)
@@ -1,4 +1,4 @@
# ===============================================================================
#===============================================================================
# Copyright (C) 2011 Anton Vorobyov
#
# This file is part of eos.
@@ -15,16 +15,15 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
#===============================================================================

from sqlalchemy import Column, Table, String
from sqlalchemy.orm import mapper

from eos.types import MiscData
from eos.db import saveddata_meta
from eos.saveddata.miscData import MiscData

miscdata_table = Table("miscdata", saveddata_meta,
    Column("fieldName", String, primary_key=True),
    Column("fieldValue", String))
    Column("fieldName", String, primary_key=True),
    Column("fieldValue", String))

mapper(MiscData, miscdata_table)

@@ -1,4 +1,4 @@
# ===============================================================================
#===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
@@ -15,42 +15,25 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
#===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, CheckConstraint, Boolean, DateTime
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy import Table, Column, Integer, ForeignKey, CheckConstraint, Boolean
from sqlalchemy.orm import relation, mapper
import datetime

from eos.db import saveddata_meta
from eos.saveddata.module import Module
from eos.saveddata.mutator import Mutator
from eos.saveddata.fit import Fit
from eos.types import Module, Fit

modules_table = Table("modules", saveddata_meta,
    Column("ID", Integer, primary_key=True),
    Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
    Column("itemID", Integer, nullable=True),
    Column("baseItemID", Integer, nullable=True),
    Column("mutaplasmidID", Integer, nullable=True),
    Column("dummySlot", Integer, nullable=True, default=None),
    Column("ID", Integer, primary_key = True),
    Column("fitID", Integer, ForeignKey("fits.ID"), nullable = False, index = True),
    Column("itemID", Integer, nullable = True),
    Column("dummySlot", Integer, nullable = True, default = None),
    Column("chargeID", Integer),
    Column("state", Integer, CheckConstraint("state >= -1"), CheckConstraint("state <= 2")),
    Column("projected", Boolean, default=False, nullable=False),
    Column("projected", Boolean, default = False, nullable = False),
    Column("position", Integer),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
    Column("spoolType", Integer, nullable=True),
    Column("spoolAmount", Float, nullable=True),
    CheckConstraint('("dummySlot" = NULL OR "itemID" = NULL) AND "dummySlot" != "itemID"'))

mapper(Module, modules_table,
    properties={
        "owner": relation(Fit),
        "mutators": relation(
            Mutator,
            backref="module",
            cascade="all,delete-orphan",
            collection_class=attribute_mapped_collection('attrID')
        )
    })
    properties = {"owner" : relation(Fit)})


@@ -1,35 +0,0 @@
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import datetime

from sqlalchemy import Column, DateTime, Float, ForeignKey, Integer, Table
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.saveddata.mutator import Mutator

mutator_table = Table("mutators", saveddata_meta,
    Column("moduleID", Integer, ForeignKey("modules.ID"), primary_key=True, index=True),
    Column("attrID", Integer, primary_key=True, index=True),
    Column("value", Float, nullable=False),
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))

mapper(Mutator, mutator_table)
Some files were not shown because too many files have changed in this diff.