Compare commits
1036 Commits
preview_ba...v2.0.0b4
[Commit table (Author | SHA1 | Date): 1036 commits between preview_ba and v2.0.0b4, beginning with 7956ca0409 and ending with 7eb3ad98fa. Only the abbreviated SHA1 column survived extraction; the author, date, and commit-message cells are missing, so the per-commit rows are not reproduced here.]
.appveyor.yml — new file (186 lines)
@@ -0,0 +1,186 @@
environment:

  global:
    # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
    # /E:ON and /V:ON options are not enabled in the batch script intepreter
    # See: http://stackoverflow.com/a/13751649/163740
    CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"

  matrix:

    # Python 2.7.10 is the latest version and is not pre-installed.

    - PYTHON: "C:\\Python27.10"
      PYTHON_VERSION: "2.7.10"
      PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python27.10-x64"
    #  PYTHON_VERSION: "2.7.10"
    #  PYTHON_ARCH: "64"

    # Pre-installed Python versions, which Appveyor may upgrade to
    # a later point release.
    # See: http://www.appveyor.com/docs/installed-software#python

    #- PYTHON: "C:\\Python27"
    #  PYTHON_VERSION: "2.7.x" # currently 2.7.9
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python27-x64"
    #  PYTHON_VERSION: "2.7.x" # currently 2.7.9
    #  PYTHON_ARCH: "64"

    #- PYTHON: "C:\\Python33"
    #  PYTHON_VERSION: "3.3.x" # currently 3.3.5
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python33-x64"
    #  PYTHON_VERSION: "3.3.x" # currently 3.3.5
    #  PYTHON_ARCH: "64"

    #- PYTHON: "C:\\Python34"
    #  PYTHON_VERSION: "3.4.x" # currently 3.4.3
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python34-x64"
    #  PYTHON_VERSION: "3.4.x" # currently 3.4.3
    #  PYTHON_ARCH: "64"

    # Python versions not pre-installed

    # Python 2.6.6 is the latest Python 2.6 with a Windows installer
    # See: https://github.com/ogrisel/python-appveyor-demo/issues/10

    #- PYTHON: "C:\\Python266"
    #  PYTHON_VERSION: "2.6.6"
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python266-x64"
    #  PYTHON_VERSION: "2.6.6"
    #  PYTHON_ARCH: "64"

    #- PYTHON: "C:\\Python35"
    #  PYTHON_VERSION: "3.5.0"
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python35-x64"
    #  PYTHON_VERSION: "3.5.0"
    #  PYTHON_ARCH: "64"

    # Major and minor releases (i.e x.0.0 and x.y.0) prior to 3.3.0 use
    # a different naming scheme.

    #- PYTHON: "C:\\Python270"
    #  PYTHON_VERSION: "2.7.0"
    #  PYTHON_ARCH: "32"

    #- PYTHON: "C:\\Python270-x64"
    #  PYTHON_VERSION: "2.7.0"
    #  PYTHON_ARCH: "64"

install:
  # If there is a newer build queued for the same PR, cancel this one.
  # The AppVeyor 'rollout builds' option is supposed to serve the same
  # purpose but it is problematic because it tends to cancel builds pushed
  # directly to master instead of just PR builds (or the converse).
  # credits: JuliaLang developers.
  - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
        https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
        Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
        throw "There are newer queued builds for this pull request, failing early." }

  # Install wxPython
  - 'ECHO Downloading wxPython.'
  - "appveyor DownloadFile https://goo.gl/yvO8PB -FileName C:\\wxpython.exe"
  #- "appveyor DownloadFile https://goo.gl/Uj0jV3 -FileName C:\\wxpython64.exe"

  - 'ECHO Install wxPython'
  - "C:\\wxpython.exe /SP- /VERYSILENT /NORESTART"
  #- "C:\\wxpython64.exe /SP- /VERYSILENT /NORESTART"

  - ECHO "Filesystem root:"
  - ps: "ls \"C:/\""

  - ECHO "Filesystem pyfa root:"
  - ps: "ls \"C:\\projects\\pyfa\\\""

  - ECHO "Installed SDKs:"
  - ps: "ls \"C:/Program Files/Microsoft SDKs/Windows\""

  # Install Python (from the official .msi of http://python.org) and pip when
  # not already installed.
  # - ps: if (-not(Test-Path($env:PYTHON))) { & appveyor\install.ps1 }

  # Prepend newly installed Python to the PATH of this build (this cannot be
  # done from inside the powershell script as it would require to restart
  # the parent CMD process).
  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"

  # Check that we have the expected version and architecture for Python
  - "python --version"
  - "python -c \"import struct; print(struct.calcsize('P') * 8)\""

  # Upgrade to the latest version of pip to avoid it displaying warnings
  # about it being out of date.
  - "pip install --disable-pip-version-check --user --upgrade pip"

  # Install the build dependencies of the project. If some dependencies contain
  # compiled extensions and are not provided as pre-built wheel packages,
  # pip will build them from source using the MSVC compiler matching the
  # target Python version and architecture
  # C:\\projects\\eve-gnosis\\
  - ECHO "Install pip requirements:"
  - "pip install -r requirements.txt"
  - "pip install -r requirements_test.txt"
  - "pip install -r requirements_build_windows.txt"

build_script:
  # Build the compiled extension
  # - "python setup.py build"
  - ECHO "Build pyfa:"
  #- copy C:\projects\pyfa\dist_assets\win\pyfa.spec C:\projects\pyfa\pyfa.spec
  - "python C:\\projects\\pyfa\\setup.py build"

  #- ECHO "Build pyfa (Debug):"
  #- copy C:\projects\pyfa\dist_assets\win\pyfa_debug.spec C:\projects\pyfa\pyfa_debug.spec
  #- "pyinstaller.exe --clean --noconfirm --windowed --upx-dir=C:\\projects\\pyfa\\scripts\\upx.exe C:\\projects\\pyfa\\pyfa_debug.spec"

build: on

after_build:
  - ps: "ls \"./\""
  #- ps: "ls \"C:\\projects\\pyfa\\build\\pyfa\\\""
  - ps: "ls \"C:\\projects\\pyfa\\build\\\""
  - ps: "ls \"C:\\projects\\pyfa\\build\\exe.win32-2.7\\\""
  # Zip
  # APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER
  #- 7z a build.zip -r C:\projects\pyfa\build\pyfa\*.*
  - 7z a pyfa.zip -r C:\projects\pyfa\build\exe.win32-2.7\*.*
  #- 7z a pyfa_debug.zip -r C:\projects\pyfa\dist\pyfa_debug\*.*

on_success:
  # Do nothing right now

test_script:
  #- tox
  #- "py.test --cov=./"
  # Run the project tests
  # - "%CMD_IN_ENV% python C:/projects/eve-gnosis/setup.py nosetests"

after_test:
  # If tests are successful, create binary packages for the project.
  # - "%CMD_IN_ENV% python setup.py bdist_wheel"
  # - "%CMD_IN_ENV% python setup.py bdist_wininst"
  # - "%CMD_IN_ENV% python setup.py bdist_msi"
  # - ps: "ls dist"

artifacts:
  # Archive the generated packages in the ci.appveyor.com build report.
  - path: pyfa.zip
    name: 'pyfa.zip'
  #- path: pyfa_debug.zip
  #  name: Pyfa_debug

#on_success:
#  - TODO: upload the content of dist/*.whl to a public wheelhouse
#
.gitattributes (vendored) — 2 changed lines
@@ -14,7 +14,7 @@
*.py3 text eol=crlf
*.pyw text eol=crlf
*.pyx text eol=crlf

pyfa.py text eol=lf

# Denote all files that are truly binary and should not be modified.
# Binary files
.gitignore (vendored) — 6 changed lines
@@ -49,7 +49,6 @@ Pyfa.egg-info/
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
@@ -118,3 +117,8 @@ ENV/
# Pycharm project settings
.idea
eos.iml
gitversion
.version
/.version
*.swp
.mailmap — new file (14 lines)
@@ -0,0 +1,14 @@
cncfanatics <diego.duclos@gmail.com> cncfanatics <cncfanatics@titanium.(none)>
blitzmann <holmes.ryan.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> blitzmann <ryan.xgamer99@gmail.com>
blitzmann <holmes.ryan.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> blitzman <ryan.xgamer99@gmail.com>
blitzmann <holmes.ryan.90@gmail.com> Ryan Holmes <ryan.holmes.90@gmail.com>
blitzmann <holmes.ryan.90@gmail.com>
Corollax <corollax@gmail.com> Corollax <corollax@corollax-laptop.(none)>
Corollax <corollax@gmail.com> Corollax <corollax@corollax-N76VM.(none)>
Mr. Nukealizer <mr.nukealizer@gmail.com> Mr. Nukealizer <MrNukealizer@users.noreply.github.com>
DarkPhoenix <phoenix@mail.ru>
Sakari Orisi <sakari@evefit.org>
Will Wykeham <will@wykeham.net> Will Wykeham <will.wykeham@paconsulting.com>
OISumeko <camerongrout@gmail.com> OISumeko <cameron@sporadic.co.nz>
.travis.yml — 32 changed lines
@@ -1,4 +1,5 @@
language: python
cache: pip
python:
  - '2.7'
env:
@@ -6,20 +7,33 @@ env:
addons:
  apt:
    packages:
      # for wxPython:
      - python-wxgtk2.8
      - python-wxtools
      - wx2.8-doc
      - wx2.8-examples
      - wx2.8-headers
      - wx2.8-i18n
before_install:
  - pip install -U tox
  - sudo apt-get update && sudo apt-get --reinstall install -qq language-pack-en language-pack-ru language-pack-he language-pack-zh-hans
  - pip install tox
  # We're not actually installing Tox, but have to run it before we install wxPython via Conda. This is fugly but vOv
  - tox
  # get Conda
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
    else
      wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
    fi
  - bash miniconda.sh -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  # Useful for debugging any issues with conda
  - conda info -a
install:
  # install wxPython 3.0.0.0
  - conda install -c https://conda.anaconda.org/travis wxpython
before_script:
  - pip install -r requirements.txt
  - pip install -r requirements_test.txt
script:
  - tox
  - py.test --cov=./
after_success:
  - bash <(curl -s https://codecov.io/bash)
before_deploy:
  - pip install -r requirements_build_linux.txt
ISSUE_TEMPLATE.md — new file (34 lines)
@@ -0,0 +1,34 @@
<!--

Submit a bug report or feature request

Here you can inform pyfa developers of potential bugs or suggest features / improvements to the project. Please check
to make sure that the bug hasn't been reported or feature requested before submitting. If you have general questions
about the project and want to reach out to the developers personally, please check out our [Slack](https://pyfainvite.azurewebsites.net/).

-->

## Bug Report


### Expected behavior:


### Actual behavior:


### Detailed steps to reproduce:


### Fits involved in EFT format (Edit > To Clipboard > EFT):


### Release or development git branch? Please note the release version or commit hash:


### Operating system and version (eg: Windows 10, OS X 10.9, OS X 10.11, Ubuntu 16.10):


### Other relevant information:
README.md — 13 changed lines
@@ -1,6 +1,6 @@
# pyfa

[](https://pyfainvite.azurewebsites.net/)
[](https://pyfainvite.azurewebsites.net/) [](https://travis-ci.org/pyfa-org/Pyfa)



@@ -36,12 +36,8 @@ The following is a list of pyfa packages available for certain distributions. Pl
### Dependencies
If you wish to help with development or simply need to run pyfa through a Python interpreter, the following software is required:

* Python 2.7
* `wxPython` 2.8/3.0
* `sqlalchemy` >= 1.0.5
* `dateutil`
* `matplotlib` (for some Linux distributions you may need to install separate wxPython bindings such as `python-matplotlib-wx`)
* `requests`
* Python 3.6
* Requirements as listed in `requirements.txt`

## Bug Reporting
The preferred method of reporting bugs is through the project's [GitHub Issues interface](https://github.com/pyfa-org/Pyfa/issues). Alternatively, posting a report in the [pyfa thread](http://forums.eveonline.com/default.aspx?g=posts&t=247609) on the official EVE Online forums is acceptable. Guidelines for bug reporting can be found on [this wiki page](https://github.com/DarkFenX/Pyfa/wiki/Bug-Reporting).
@@ -51,8 +47,7 @@ pyfa is licensed under the GNU GPL v3.0, see LICENSE

## Resources
* Development repository: [https://github.com/pyfa-org/Pyfa](https://github.com/pyfa-org/Pyfa)
* XMPP conference: [pyfa@conference.jabber.org](pyfa@conference.jabber.org)
* [EVE forum thread](http://forums.eveonline.com/default.aspx?g=posts&t=247609)
* [EVE forum thread](https://forums.eveonline.com/t/27156)
* [EVE University guide using pyfa](http://wiki.eveuniversity.org/Guide_to_using_PYFA)
* [EVE Online website](http://www.eveonline.com/)
_development/Pyfa_CodeStyle.xml — new file (13 lines)
@@ -0,0 +1,13 @@
<code_scheme name="Pyfa">
  <option name="LINE_SEPARATOR" value="&#10;" />
  <option name="RIGHT_MARGIN" value="165" />
  <Python>
    <option name="NEW_LINE_AFTER_COLON" value="true" />
    <option name="DICT_ALIGNMENT" value="2" />
    <option name="DICT_NEW_LINE_AFTER_LEFT_BRACE" value="true" />
    <option name="DICT_NEW_LINE_BEFORE_RIGHT_BRACE" value="true" />
    <option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true" />
    <option name="OPTIMIZE_IMPORTS_SORT_NAMES_IN_FROM_IMPORTS" value="true" />
    <option name="OPTIMIZE_IMPORTS_JOIN_FROM_IMPORTS_WITH_SAME_SOURCE" value="true" />
  </Python>
</code_scheme>
_development/Pyfa_Inspections.xml — new file (54 lines)
@@ -0,0 +1,54 @@
<profile version="1.0">
  <option name="myName" value="Pyfa" />
  <inspection_tool class="IgnoreUnusedEntry" enabled="false" level="UNUSED ENTRY" enabled_by_default="false" />
  <inspection_tool class="InconsistentLineSeparators" enabled="true" level="ERROR" enabled_by_default="true" />
  <inspection_tool class="ProblematicWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyBehaveInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyClassicStyleClassInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
    <option name="ourVersions">
      <value>
        <list size="1">
          <item index="0" class="java.lang.String" itemvalue="2.7" />
        </list>
      </value>
    </option>
  </inspection_tool>
  <inspection_tool class="PyMissingTypeHintsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
  <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
    <option name="ignoredPackages">
      <value>
        <list size="1">
          <item index="0" class="java.lang.String" itemvalue="wxPython" />
        </list>
      </value>
    </option>
  </inspection_tool>
  <inspection_tool class="PyPep8Inspection" enabled="true" level="TYPO" enabled_by_default="true">
    <option name="ignoredErrors">
      <list>
        <option value="E203" />
        <option value="E127" />
        <option value="E128" />
        <option value="E126" />
      </list>
    </option>
  </inspection_tool>
  <inspection_tool class="PyPep8NamingInspection" enabled="true" level="TYPO" enabled_by_default="true">
    <option name="ignoredErrors">
      <list>
        <option value="N802" />
        <option value="N806" />
        <option value="N803" />
        <option value="N814" />
      </list>
    </option>
  </inspection_tool>
  <inspection_tool class="PyShadowingBuiltinsInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="PyShadowingNamesInspection" enabled="true" level="WARNING" enabled_by_default="true" />
  <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
    <option name="processCode" value="true" />
    <option name="processLiterals" value="true" />
    <option name="processComments" value="true" />
  </inspection_tool>
</profile>
_development/__init__.py — new empty file (0 lines)
_development/helpers.py — new file (145 lines)
@@ -0,0 +1,145 @@
# noinspection PyPackageRequirements
import pytest

import os
import sys
import threading

from sqlalchemy import MetaData, create_engine
from sqlalchemy.orm import sessionmaker

script_dir = os.path.dirname(os.path.abspath(__file__))
# Add root folder to python paths
sys.path.append(os.path.realpath(os.path.join(script_dir, '..', '..')))
sys._called_from_test = True


# noinspection PyUnresolvedReferences,PyUnusedLocal
@pytest.fixture
def DBInMemory_test():
    def rollback():
        with sd_lock:
            saveddata_session.rollback()

    print("Creating database in memory")
    from os.path import realpath, join, dirname, abspath

    debug = False
    gamedataCache = True
    saveddataCache = True
    gamedata_version = ""
    gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(str(__file__))), "..", "eve.db"))
    saveddata_connectionstring = 'sqlite:///:memory:'

    class ReadOnlyException(Exception):
        pass

    if callable(gamedata_connectionstring):
        gamedata_engine = create_engine("sqlite://", creator=gamedata_connectionstring, echo=debug)
    else:
        gamedata_engine = create_engine(gamedata_connectionstring, echo=debug)

    gamedata_meta = MetaData()
    gamedata_meta.bind = gamedata_engine
    gamedata_session = sessionmaker(bind=gamedata_engine, autoflush=False, expire_on_commit=False)()

    # This should be moved elsewhere, maybe as an actual query. Current, without try-except, it breaks when making a new
    # game db because we haven't reached gamedata_meta.create_all()
    try:
        gamedata_version = gamedata_session.execute(
            "SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
        ).fetchone()[0]
    except Exception as e:
        print("Missing gamedata version.")
        gamedata_version = None

    if saveddata_connectionstring is not None:
        if callable(saveddata_connectionstring):
            saveddata_engine = create_engine(creator=saveddata_connectionstring, echo=debug)
        else:
            saveddata_engine = create_engine(saveddata_connectionstring, echo=debug)

        saveddata_meta = MetaData()
        saveddata_meta.bind = saveddata_engine
        saveddata_session = sessionmaker(bind=saveddata_engine, autoflush=False, expire_on_commit=False)()
    else:
        saveddata_meta = None

    # Lock controlling any changes introduced to session
    sd_lock = threading.Lock()

    # Import all the definitions for all our database stuff
    # noinspection PyPep8
    #from eos.db.gamedata import alphaClones, attribute, category, effect, group, icon, item, marketGroup, metaData, metaGroup, queries, traits, unit
    # noinspection PyPep8
    #from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, miscData, module, override, price, queries, skill, targetResists, user

    # If using in memory saveddata, you'll want to reflect it so the data structure is good.
    if saveddata_connectionstring == "sqlite:///:memory:":
        saveddata_meta.create_all()

    # Output debug info to help us troubleshoot Travis
    print(saveddata_engine)
    print(gamedata_engine)

    helper = {
        #'config': eos.config,
        'gamedata_session' : gamedata_session,
        'saveddata_session' : saveddata_session,
    }
    return helper


# noinspection PyUnresolvedReferences,PyUnusedLocal
@pytest.fixture
def DBInMemory():
    print("Creating database in memory")

    import eos.config

    import eos
    import eos.db

    # Output debug info to help us troubleshoot Travis
    print((eos.db.saveddata_engine))
    print((eos.db.gamedata_engine))

    helper = {
        'config': eos.config,
        'db' : eos.db,
        'gamedata_session' : eos.db.gamedata_session,
        'saveddata_session' : eos.db.saveddata_session,
    }
    return helper


@pytest.fixture
def Gamedata():
    print("Building Gamedata")
    from eos.gamedata import Item

    helper = {
        'Item': Item,
    }
    return helper


@pytest.fixture
def Saveddata():
    print("Building Saveddata")
    from eos.saveddata.ship import Ship
    from eos.saveddata.fit import Fit
    from eos.saveddata.character import Character
    from eos.saveddata.module import Module, State
    from eos.saveddata.citadel import Citadel
    from eos.saveddata.booster import Booster

    helper = {
        'Structure': Citadel,
        'Ship' : Ship,
        'Fit' : Fit,
        'Character': Character,
        'Module' : Module,
        'State' : State,
        'Booster' : Booster,
    }
    return helper
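For context, here is a minimal sketch (not part of the diff above) of how these fixtures are meant to be consumed from a test module. It mirrors the pattern the repository itself uses in helpers_fits.py; the test file name and the "Rifter" lookup are illustrative, and it assumes eve.db sits at the repository root and the eos package is importable.

# test_gamedata_smoke.py -- illustrative sketch only
import pytest  # noqa: F401

# Importing the fixtures into the test module makes them resolvable by pytest
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata  # noqa: F401


# noinspection PyShadowingNames
def test_can_query_gamedata(DB, Gamedata):
    # DBInMemory exposes live SQLAlchemy sessions in a plain dict
    session = DB['gamedata_session']
    rifter = session.query(Gamedata['Item']).filter(Gamedata['Item'].name == "Rifter").first()
    assert rifter is not None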
_development/helpers_fits.py — new file (66 lines)
@@ -0,0 +1,66 @@
import pytest

# noinspection PyPackageRequirements
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata


# noinspection PyShadowingNames
@pytest.fixture
def RifterFit(DB, Gamedata, Saveddata):
    print("Creating Rifter")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Rifter").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "My Rifter Fit")

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def KeepstarFit(DB, Gamedata, Saveddata):
    print("Creating Keepstar")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Keepstar").first()
    ship = Saveddata['Structure'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Keepstar Fit")

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def CurseFit(DB, Gamedata, Saveddata):
    print("Creating Curse - With Neuts")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Curse").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Curse - With Neuts")

    mod = Saveddata['Module'](DB['db'].getItem("Medium Energy Neutralizer II"))
    mod.state = Saveddata['State'].ONLINE

    # Add 5 neuts
    for _ in range(5):
        fit.modules.append(mod)

    return fit


# noinspection PyShadowingNames
@pytest.fixture
def HeronFit(DB, Gamedata, Saveddata):
    print("Creating Heron - RemoteSebo")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Heron").first()
    ship = Saveddata['Ship'](item)
    # setup fit
    fit = Saveddata['Fit'](ship, "Heron - RemoteSebo")

    mod = Saveddata['Module'](DB['db'].getItem("Remote Sensor Booster II"))
    mod.state = Saveddata['State'].ONLINE

    # Add 4 remote sensor boosters
    for _ in range(4):
        fit.modules.append(mod)

    return fit
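A short usage sketch (again not part of the diff): a test that pulls in one of the fit fixtures above. The fixture's dependencies are imported alongside it so pytest can resolve them; the assertion on .name assumes the Fit class exposes the display name passed to its constructor as an attribute, which is an assumption beyond what the fixture itself shows.

# test_fit_fixtures.py -- illustrative sketch only
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata  # noqa: F401
from _development.helpers_fits import RifterFit  # noqa: F401


# noinspection PyShadowingNames
def test_rifter_fit_is_built(RifterFit):
    # RifterFit returns a saveddata Fit wrapping a Rifter hull
    assert RifterFit is not None
    assert RifterFit.name == "My Rifter Fit"  # assumption: Fit stores its name on .name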
_development/helpers_items.py — new file (12 lines)
@@ -0,0 +1,12 @@
import pytest

# noinspection PyPackageRequirements
from _development.helpers import DBInMemory as DB, Gamedata, Saveddata


# noinspection PyShadowingNames
@pytest.fixture
def StrongBluePillBooster(DB, Gamedata, Saveddata):
    print("Creating Strong Blue Pill Booster")
    item = DB['gamedata_session'].query(Gamedata['Item']).filter(Gamedata['Item'].name == "Strong Blue Pill Booster").first()
    return Saveddata['Booster'](item)
_development/helpers_locale.py — new file (101 lines)
@@ -0,0 +1,101 @@
import os

# https://msdn.microsoft.com/en-us/library/windows/desktop/dd317756(v=vs.85).aspx
windows_codecs = {
    'cp1252',  # Standard Windows
    'cp1251',  # Russian
    'cp037',
    'cp424',
    'cp437',
    'cp500',
    'cp720',
    'cp737',
    'cp775',
    'cp850',
    'cp852',
    'cp855',
    'cp856',
    'cp857',
    'cp858',
    'cp860',
    'cp861',
    'cp862',
    'cp863',
    'cp864',
    'cp865',
    'cp866',
    'cp869',
    'cp874',
    'cp875',
    'cp932',
    'cp949',
    'cp950',
    'cp1006',
    'cp1026',
    'cp1140',
    'cp1250',
    'cp1253',
    'cp1254',
    'cp1255',
    'cp1256',
    'cp1257',
    'cp1258',
}

linux_codecs = {
    'utf_8',  # Generic Linux/Mac
}

mac_codecs = [
    'utf_8',  # Generic Linux/Mac
    'mac_cyrillic',
    'mac_greek',
    'mac_iceland',
    'mac_latin2',
    'mac_roman',
    'mac_turkish',
]

universal_codecs = [
    'utf_16', 'utf_32', 'utf_32_be', 'utf_32_le', 'utf_16_be', 'utf_16_le', 'utf_7', 'utf_8_sig',
]

other_codecs = [
    'scii', 'big5', 'big5hkscs', 'euc_jp', 'euc_jis_2004', 'euc_jisx0213', 'euc_kr', 'gb2312', 'gbk', 'gb18030', 'hz', 'iso2022_jp', 'iso2022_jp_1',
    'iso2022_jp_2', 'iso2022_jp_2004', 'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr', 'latin_1', 'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5',
    'iso8859_6', 'iso8859_7', 'iso8859_8', 'iso8859_9', 'iso8859_10', 'iso8859_11', 'iso8859_13', 'iso8859_14', 'iso8859_15', 'iso8859_16', 'johab', 'koi8_r',
    'koi8_u', 'ptcp154', 'shift_jis', 'shift_jis_2004', 'shift_jisx0213'
]

system_names = {
    'Windows': windows_codecs,
    'Linux': linux_codecs,
    'Darwin': mac_codecs,
}


def GetPath(root, file=None, codec=None):
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))

    if file:
        path = os.path.join(path, file)

    if codec:
        path = path.decode(codec)

    return path


def GetUnicodePath(root, file=None, codec=None):
    # Replace this with the function we actually use for this
    path = os.path.realpath(os.path.abspath(root))

    if file:
        path = os.path.join(path, file)

    if codec:
        path = str(path, codec)
    else:
        path = str(path)

    return path
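A brief sketch (not part of the diff) of how these codec tables can be exercised: pick the candidate codecs for the current platform via system_names, add the universal UTF variants, and see which of them can decode a given byte path. The helper name and the sample path are illustrative assumptions.

# illustrative sketch only
import platform

from _development.helpers_locale import system_names, universal_codecs


def decodable_codecs(raw_path):
    # Candidate codecs for this OS plus the universal UTF variants defined above
    candidates = list(system_names.get(platform.system(), [])) + list(universal_codecs)
    usable = []
    for codec in candidates:
        try:
            raw_path.decode(codec)
            usable.append(codec)
        except (UnicodeDecodeError, LookupError):
            continue
    return usable


print(decodable_codecs("C:\\projects\\pyfa".encode("cp1252")))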
235
config.py
235
config.py
@@ -1,26 +1,29 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
# TODO: move all logging back to pyfa.py main loop
|
||||
# We moved it here just to avoid rebuilding windows skeleton for now (any change to pyfa.py needs it)
|
||||
import logging
|
||||
import logging.handlers
|
||||
from logbook import CRITICAL, DEBUG, ERROR, FingersCrossedHandler, INFO, Logger, NestedSetup, NullHandler, \
|
||||
StreamHandler, TimedRotatingFileHandler, WARNING
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
# Load variable overrides specific to distribution type
|
||||
try:
|
||||
import configforced
|
||||
except ImportError:
|
||||
pyfalog.warning("Failed to import: configforced")
|
||||
configforced = None
|
||||
|
||||
|
||||
# Turns on debug mode
|
||||
debug = False
|
||||
# Defines if our saveddata will be in pyfa root or not
|
||||
saveInRoot = False
|
||||
|
||||
# Version data
|
||||
version = "1.26.1"
|
||||
|
||||
version = "2.0.0b4"
|
||||
tag = "git"
|
||||
expansionName = "YC118.10"
|
||||
expansionName = "YC120.2"
|
||||
expansionVersion = "1.2"
|
||||
evemonMinVersion = "4081"
|
||||
|
||||
@@ -28,22 +31,17 @@ pyfaPath = None
|
||||
savePath = None
|
||||
saveDB = None
|
||||
gameDB = None
|
||||
logPath = None
|
||||
loggingLevel = None
|
||||
logging_setup = None
|
||||
|
||||
|
||||
class StreamToLogger(object):
|
||||
"""
|
||||
Fake file-like stream object that redirects writes to a logger instance.
|
||||
From: http://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/
|
||||
"""
|
||||
|
||||
def __init__(self, logger, log_level=logging.INFO):
|
||||
self.logger = logger
|
||||
self.log_level = log_level
|
||||
self.linebuf = ''
|
||||
|
||||
def write(self, buf):
|
||||
for line in buf.rstrip().splitlines():
|
||||
self.logger.log(self.log_level, line.rstrip())
|
||||
LOGLEVEL_MAP = {
|
||||
"critical": CRITICAL,
|
||||
"error": ERROR,
|
||||
"warning": WARNING,
|
||||
"info": INFO,
|
||||
"debug": DEBUG,
|
||||
}
|
||||
|
||||
|
||||
def isFrozen():
|
||||
@@ -58,73 +56,82 @@ def __createDirs(path):
|
||||
os.makedirs(path)
|
||||
|
||||
|
||||
def defPaths(customSavePath):
|
||||
def getPyfaRoot():
|
||||
if hasattr(sys, '_MEIPASS'):
|
||||
return sys._MEIPASS
|
||||
base = getattr(sys.modules['__main__'], "__file__", sys.executable) if isFrozen() else __file__
|
||||
root = os.path.dirname(os.path.realpath(os.path.abspath(base)))
|
||||
root = root
|
||||
return root
|
||||
|
||||
|
||||
def getVersion():
|
||||
if os.path.isfile(os.path.join(pyfaPath, '.version')):
|
||||
with open(os.path.join(pyfaPath, '.version')) as f:
|
||||
gitVersion = f.readline()
|
||||
return gitVersion
|
||||
# if no version file exists, then user is running from source or not an official build
|
||||
return version + " (git)"
|
||||
|
||||
|
||||
def getDefaultSave():
|
||||
return os.path.expanduser(os.path.join("~", ".pyfa"))
|
||||
|
||||
|
||||
def defPaths(customSavePath=None):
|
||||
global debug
|
||||
global pyfaPath
|
||||
global savePath
|
||||
global saveDB
|
||||
global gameDB
|
||||
global saveInRoot
|
||||
global logPath
|
||||
|
||||
if debug:
|
||||
logLevel = logging.DEBUG
|
||||
else:
|
||||
logLevel = logging.WARN
|
||||
pyfalog.debug("Configuring Pyfa")
|
||||
|
||||
# The main pyfa directory which contains run.py
|
||||
# Python 2.X uses ANSI by default, so we need to convert the character encoding
|
||||
pyfaPath = getattr(configforced, "pyfaPath", pyfaPath)
|
||||
if pyfaPath is None:
|
||||
pyfaPath = getPyfaPath()
|
||||
pyfaPath = getPyfaRoot()
|
||||
|
||||
# Where we store the saved fits etc, default is the current users home directory
|
||||
if saveInRoot is True:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
savePath = getPyfaPath("saveddata")
|
||||
savePath = os.path.join(pyfaPath, "saveddata")
|
||||
else:
|
||||
savePath = getattr(configforced, "savePath", None)
|
||||
if savePath is None:
|
||||
if customSavePath is None: # customSavePath is not overriden
|
||||
savePath = os.path.expanduser(os.path.join("~", ".pyfa"))
|
||||
savePath = getDefaultSave()
|
||||
else:
|
||||
savePath = customSavePath
|
||||
|
||||
__createDirs(savePath)
|
||||
|
||||
if isFrozen():
|
||||
certName = "cacert.pem"
|
||||
os.environ["REQUESTS_CA_BUNDLE"] = getPyfaPath(certName).encode('utf8')
|
||||
os.environ["SSL_CERT_FILE"] = getPyfaPath(certName).encode('utf8')
|
||||
|
||||
loggingFormat = '%(asctime)s %(name)-24s %(levelname)-8s %(message)s'
|
||||
logging.basicConfig(format=loggingFormat, level=logLevel)
|
||||
handler = logging.handlers.RotatingFileHandler(getSavePath("log.txt"), maxBytes=1000000, backupCount=3)
|
||||
formatter = logging.Formatter(loggingFormat)
|
||||
handler.setFormatter(formatter)
|
||||
logging.getLogger('').addHandler(handler)
|
||||
|
||||
logging.info("Starting pyfa")
|
||||
|
||||
if hasattr(sys, 'frozen'):
|
||||
stdout_logger = logging.getLogger('STDOUT')
|
||||
sl = StreamToLogger(stdout_logger, logging.INFO)
|
||||
sys.stdout = sl
|
||||
|
||||
# This interferes with cx_Freeze's own handling of exceptions. Find a way to fix this.
|
||||
# stderr_logger = logging.getLogger('STDERR')
|
||||
# sl = StreamToLogger(stderr_logger, logging.ERROR)
|
||||
# sys.stderr = sl
|
||||
# if isFrozen():
|
||||
# os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(pyfaPath, "cacert.pem")
|
||||
# os.environ["SSL_CERT_FILE"] = os.path.join(pyfaPath, "cacert.pem")
|
||||
|
||||
# The database where we store all the fits etc
|
||||
saveDB = getSavePath("saveddata.db")
|
||||
saveDB = os.path.join(savePath, "saveddata-py3-dev.db")
|
||||
|
||||
# The database where the static EVE data from the datadump is kept.
|
||||
# This is not the standard sqlite datadump but a modified version created by eos
|
||||
# maintenance script
|
||||
gameDB = getPyfaPath("eve.db")
|
||||
gameDB = getattr(configforced, "gameDB", gameDB)
|
||||
if not gameDB:
|
||||
gameDB = os.path.join(pyfaPath, "eve.db")
|
||||
|
||||
# DON'T MODIFY ANYTHING BELOW!
|
||||
if debug:
|
||||
logFile = "pyfa_debug.log"
|
||||
else:
|
||||
logFile = "pyfa.log"
|
||||
|
||||
logPath = os.path.join(savePath, logFile)
|
||||
|
||||
# DON'T MODIFY ANYTHING BELOW
|
||||
import eos.config
|
||||
|
||||
# Caching modifiers, disable all gamedata caching, its unneeded.
|
||||
@@ -133,40 +140,100 @@ def defPaths(customSavePath):
|
||||
eos.config.saveddata_connectionstring = "sqlite:///" + saveDB + "?check_same_thread=False"
|
||||
eos.config.gamedata_connectionstring = "sqlite:///" + gameDB + "?check_same_thread=False"
|
||||
|
||||
print(eos.config.saveddata_connectionstring)
|
||||
print(eos.config.gamedata_connectionstring)
|
||||
|
||||
def getPyfaPath(Append=None):
|
||||
base = getattr(sys.modules['__main__'], "__file__", sys.executable) if isFrozen() else sys.argv[0]
|
||||
root = os.path.dirname(os.path.realpath(os.path.abspath(base)))
|
||||
|
||||
if Append:
|
||||
path = parsePath(root, Append)
|
||||
else:
|
||||
path = parsePath(root)
|
||||
|
||||
return path
|
||||
# initialize the settings
|
||||
from service.settings import EOSSettings
|
||||
eos.config.settings = EOSSettings.getInstance().EOSSettings # this is kind of confusing, but whatever
|
||||
|
||||
|
||||
def getSavePath(Append=None):
|
||||
root = savePath
|
||||
def defLogging():
|
||||
global debug
|
||||
global logPath
|
||||
global loggingLevel
|
||||
global logging_setup
|
||||
|
||||
if Append:
|
||||
path = parsePath(root, Append)
|
||||
else:
|
||||
path = parsePath(root)
|
||||
try:
|
||||
if debug:
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
StreamHandler(
|
||||
sys.stdout,
|
||||
bubble=False,
|
||||
level=loggingLevel
|
||||
),
|
||||
TimedRotatingFileHandler(
|
||||
logPath,
|
||||
level=0,
|
||||
backup_count=3,
|
||||
bubble=True,
|
||||
date_format='%Y-%m-%d',
|
||||
),
|
||||
])
|
||||
else:
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
FingersCrossedHandler(
|
||||
TimedRotatingFileHandler(
|
||||
logPath,
|
||||
level=0,
|
||||
backup_count=3,
|
||||
bubble=False,
|
||||
date_format='%Y-%m-%d',
|
||||
),
|
||||
action_level=ERROR,
|
||||
buffer_size=1000,
|
||||
# pull_information=True,
|
||||
# reset=False,
|
||||
)
|
||||
])
|
||||
except:
|
||||
print("Critical error attempting to setup logging. Falling back to console only.")
|
||||
logging_setup = NestedSetup([
|
||||
# make sure we never bubble up to the stderr handler
|
||||
# if we run out of setup handling
|
||||
NullHandler(),
|
||||
StreamHandler(
|
||||
sys.stdout,
|
||||
bubble=False
|
||||
)
|
||||
])
|
||||
|
||||
return path
|
||||
with logging_setup.threadbound():
|
||||
|
||||
|
||||
def parsePath(root, Append=None):
|
||||
if Append:
|
||||
path = os.path.join(root, Append)
|
||||
else:
|
||||
path = root
|
||||
|
||||
if type(path) == str: # leave unicode ones alone
|
||||
# Output all stdout (print) messages as warnings
|
||||
try:
|
||||
path = path.decode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
path = path.decode('windows-1252')
|
||||
sys.stdout = LoggerWriter(pyfalog.warning)
|
||||
except:
|
||||
pyfalog.critical("Cannot redirect. Continuing without writing stdout to log.")
|
||||
|
||||
return path
|
||||
# Output all stderr (stacktrace) messages as critical
|
||||
try:
|
||||
sys.stderr = LoggerWriter(pyfalog.critical)
|
||||
except:
|
||||
pyfalog.critical("Cannot redirect. Continuing without writing stderr to log.")
|
||||
|
||||
|
||||
class LoggerWriter(object):
|
||||
def __init__(self, level):
|
||||
# self.level is really like using log.debug(message)
|
||||
# at least in my case
|
||||
self.level = level
|
||||
|
||||
def write(self, message):
|
||||
# if statement reduces the amount of newlines that are
|
||||
# printed to the logger
|
||||
if message.strip() != '':
|
||||
self.level(message.replace("\n", ""))
|
||||
|
||||
def flush(self):
|
||||
# create a flush method so things can be flushed when
|
||||
# the system wants to. Not sure if simply 'printing'
|
||||
# sys.stderr is the correct way to do it, but it seemed
|
||||
# to work properly for me.
|
||||
self.level(sys.stderr)
|
||||
|
||||
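The config hunk above redirects stdout (and, where it does not fight cx_Freeze, stderr) into the logger via StreamToLogger / LoggerWriter, so print output and tracebacks land in the log file instead of a console that frozen builds do not have. A minimal, self-contained sketch of the same pattern with logbook, using illustrative names rather than pyfa's own wiring:

```python
# Minimal sketch of the stdout/stderr redirection pattern shown above.
# Names are illustrative; pyfa sets this up inside defLogging().
import sys
from logbook import FileHandler, Logger

log = Logger("redirect-demo")
FileHandler("redirect-demo.log").push_application()  # keep records off stderr


class LoggerWriter(object):
    def __init__(self, level):
        # 'level' is a bound logging method, e.g. log.warning
        self.level = level

    def write(self, message):
        # print() emits bare newlines as separate writes; skip those
        if message.strip() != "":
            self.level(message.replace("\n", ""))

    def flush(self):
        # file-like objects must provide flush(); nothing is buffered here
        pass


sys.stdout = LoggerWriter(log.warning)    # print() output becomes warnings
sys.stderr = LoggerWriter(log.critical)   # tracebacks become criticals
print("hello")                            # ends up in redirect-demo.log
```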
5666 dist_assets/cacert.pem (Normal file; diff suppressed because it is too large)
70 dist_assets/linux/pyfa-linux.spec (Normal file)
@@ -0,0 +1,70 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
|
||||
label = subprocess.check_output([
|
||||
"git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('gitversion', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
|
||||
added_files = [
|
||||
( 'imgs/gui/*.png', 'imgs/gui' ),
|
||||
( 'imgs/gui/*.gif', 'imgs/gui' ),
|
||||
( 'imgs/icons/*.png', 'imgs/icons' ),
|
||||
( 'imgs/renders/*.png', 'imgs/renders' ),
|
||||
( 'dist_assets/win/pyfa.ico', '.' ),
|
||||
( 'dist_assets/cacert.pem', '.' ),
|
||||
( 'eve.db', '.' ),
|
||||
( 'README.md', '.' ),
|
||||
( 'LICENSE', '.' ),
|
||||
( 'gitversion', '.' ),
|
||||
]
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/effects', 'eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
|
||||
a = Analysis(['pyfa.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
name='pyfa',
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
console=True )
|
||||
coll = COLLECT(exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa')
|
||||
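The os.walk loop in the spec above exists because eos/effects, eos/db/migrations and service/conversions import their modules dynamically, so PyInstaller's static analysis cannot see them; listing them in hiddenimports forces them into the bundle. A sketch of the same idea written with os.sep handling, an assumption on my part aimed at Windows paths, whereas the spec relies on forward slashes:

```python
# Sketch: build a hiddenimports list for packages that import dynamically.
# Directories match the spec above; the os.sep handling is an assumption
# for Windows, where os.walk yields backslash-separated roots.
import os

paths = ("eos/effects", "eos/db/migrations", "service/conversions")
hidden = []

for base in paths:
    for root, _folders, files in os.walk(base):
        package = root.replace(os.sep, ".").replace("/", ".")
        for name in files:
            if name.endswith(".py") and not name.startswith("_"):
                hidden.append("{}.{}".format(package, name[:-3]))

# hidden now holds entries such as "eos.effects.<module>", ready for
# Analysis(..., hiddenimports=hidden, ...)
```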
73 dist_assets/mac/pyfa.spec (Normal file)
@@ -0,0 +1,73 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
import requests.certs
|
||||
|
||||
label = subprocess.check_output([
|
||||
"git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('.version', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
('../../imgs/gui/*.png', 'imgs/gui'),
|
||||
('../../imgs/gui/*.gif', 'imgs/gui'),
|
||||
('../../imgs/icons/*.png', 'imgs/icons'),
|
||||
('../../imgs/renders/*.png', 'imgs/renders'),
|
||||
('../../dist_assets/win/pyfa.ico', '.'),
|
||||
(requests.certs.where(), '.'), # is this needed anymore?
|
||||
('../../eve.db', '.'),
|
||||
('../../README.md', '.'),
|
||||
('../../LICENSE', '.'),
|
||||
('../../.version', '.'),
|
||||
]
|
||||
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/effects', 'eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis([r'../../pyfa.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=['dist_assets/pyinstaller_hooks'],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
name='pyfa',
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
runtime_tmpdir=None,
|
||||
console=False ,
|
||||
icon='dist_assets/mac/pyfa.icns',
|
||||
)
|
||||
|
||||
app = BUNDLE(exe,
|
||||
name='pyfa.app',
|
||||
icon=None,
|
||||
bundle_identifier=None)
|
||||
78
dist_assets/pyinstaller_hooks/hook-matplotlib.backends.py
Normal file
78
dist_assets/pyinstaller_hooks/hook-matplotlib.backends.py
Normal file
@@ -0,0 +1,78 @@
|
||||
# This apes hook-matplotlib.backends.py, but removes all backends except
# the ones in the KEEP list below.
|
||||
# Courtesy of https://github.com/bpteague/cytoflow/blob/70f9291/packaging/hook-matplotlib.backends.py
|
||||
|
||||
KEEP = ["WXAgg", "WX", "agg"]
|
||||
|
||||
from PyInstaller.compat import is_darwin
|
||||
from PyInstaller.utils.hooks import (
|
||||
eval_statement, exec_statement, logger)
|
||||
|
||||
|
||||
def get_matplotlib_backend_module_names():
|
||||
"""
|
||||
List the names of all matplotlib backend modules importable under the
|
||||
current Python installation.
|
||||
Returns
|
||||
----------
|
||||
list
|
||||
List of the fully-qualified names of all such modules.
|
||||
"""
|
||||
# Statement safely importing a single backend module.
|
||||
import_statement = """
|
||||
import os, sys
|
||||
# Preserve stdout.
|
||||
sys_stdout = sys.stdout
|
||||
try:
|
||||
# Redirect output printed by this importation to "/dev/null", preventing
|
||||
# such output from being erroneously interpreted as an error.
|
||||
with open(os.devnull, 'w') as dev_null:
|
||||
sys.stdout = dev_null
|
||||
__import__('%s')
|
||||
# If this is an ImportError, print this exception's message without a traceback.
|
||||
# ImportError messages are human-readable and require no additional context.
|
||||
except ImportError as exc:
|
||||
sys.stdout = sys_stdout
|
||||
print(exc)
|
||||
# Else, print this exception preceded by a traceback. traceback.print_exc()
|
||||
# prints to stderr rather than stdout and must not be called here!
|
||||
except Exception:
|
||||
sys.stdout = sys_stdout
|
||||
import traceback
|
||||
print(traceback.format_exc())
|
||||
"""
|
||||
|
||||
# List of the human-readable names of all available backends.
|
||||
backend_names = eval_statement(
|
||||
'import matplotlib; print(matplotlib.rcsetup.all_backends)')
|
||||
|
||||
# List of the fully-qualified names of all importable backend modules.
|
||||
module_names = []
|
||||
|
||||
# If the current system is not OS X and the "CocoaAgg" backend is available,
|
||||
# remove this backend from consideration. Attempting to import this backend
|
||||
# on non-OS X systems halts the current subprocess without printing output
|
||||
# or raising exceptions, preventing its reliable detection.
|
||||
if not is_darwin and 'CocoaAgg' in backend_names:
|
||||
backend_names.remove('CocoaAgg')
|
||||
|
||||
# For safety, attempt to import each backend in a unique subprocess.
|
||||
for backend_name in backend_names:
|
||||
if backend_name in KEEP:
|
||||
continue
|
||||
|
||||
module_name = 'matplotlib.backends.backend_%s' % backend_name.lower()
|
||||
stdout = exec_statement(import_statement % module_name)
|
||||
|
||||
# If no output was printed, this backend is importable.
|
||||
if not stdout:
|
||||
module_names.append(module_name)
|
||||
logger.info(' Matplotlib backend "%s": removed' % backend_name)
|
||||
|
||||
return module_names
|
||||
|
||||
# Freeze all importable backends, as PyInstaller is unable to determine exactly
|
||||
# which backends are required by the current program.
|
||||
e=get_matplotlib_backend_module_names()
|
||||
print(e)
|
||||
excludedimports = e
|
||||
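The hook above decides which matplotlib backends to exclude by importing each candidate in a throwaway subprocess and treating any printed output as a failed import; only silent imports count as present. A stripped-down sketch of that probe using the standard library instead of PyInstaller's eval_statement/exec_statement helpers (illustrative only, and simplified: the real hook also silences anything the backend prints during a successful import):

```python
# Sketch of the probe technique used by the hook above: try the import in a
# separate interpreter and treat any output as "not importable".
import subprocess
import sys

def backend_importable(module_name):
    probe = (
        "import importlib\n"
        "try:\n"
        "    importlib.import_module({!r})\n"
        "except Exception as exc:\n"
        "    print(exc)\n"
    ).format(module_name)
    out = subprocess.run(
        [sys.executable, "-c", probe],
        capture_output=True, text=True,
    )
    return out.returncode == 0 and out.stdout.strip() == ""

print(backend_importable("matplotlib.backends.backend_agg"))
```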
81 dist_assets/win/pyfa.spec (Normal file)
@@ -0,0 +1,81 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import os
|
||||
from itertools import chain
|
||||
import subprocess
|
||||
import requests.certs
|
||||
|
||||
label = subprocess.check_output([
|
||||
"git", "describe", "--tags"]).strip()
|
||||
|
||||
with open('.version', 'w+') as f:
|
||||
f.write(label.decode())
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
('../../imgs/gui/*.png', 'imgs/gui'),
|
||||
('../../imgs/gui/*.gif', 'imgs/gui'),
|
||||
('../../imgs/icons/*.png', 'imgs/icons'),
|
||||
('../../imgs/renders/*.png', 'imgs/renders'),
|
||||
('../../dist_assets/win/pyfa.ico', '.'),
|
||||
(requests.certs.where(), '.'), # is this needed anymore?
|
||||
('../../eve.db', '.'),
|
||||
('../../README.md', '.'),
|
||||
('../../LICENSE', '.'),
|
||||
('../../.version', '.'),
|
||||
]
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk directories that do dynamic importing
|
||||
paths = ('eos/effects', 'eos/db/migrations', 'service/conversions')
|
||||
for root, folders, files in chain.from_iterable(os.walk(path) for path in paths):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(['../../pyfa.py'],
|
||||
pathex=[
|
||||
# Need this, see https://github.com/pyinstaller/pyinstaller/issues/1566
|
||||
# To get this, download and install windows 10 SDK
|
||||
# If not building on Windows 10, this might be optional
|
||||
r'C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\x86'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=['dist_assets/pyinstaller_hooks'],
|
||||
runtime_hooks=[],
|
||||
excludes=['Tkinter'],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
debug=False,
|
||||
console=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
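At runtime the data files bundled by COLLECT (eve.db, cacert.pem, .version, the icon) sit next to the frozen executable, which is why config resolves paths off sys.executable when frozen; one-file builds unpack them to sys._MEIPASS instead. A hedged sketch of that lookup, separate from pyfa's own getPyfaPath (the helper name is illustrative):

```python
# Sketch: resolve a bundled data file both from source and from a frozen build.
# resource_path is an illustrative name, not pyfa's API.
import os
import sys

def resource_path(name):
    if getattr(sys, "frozen", False):
        # one-file builds unpack to sys._MEIPASS; one-folder builds keep
        # datas next to the executable
        base = getattr(sys, "_MEIPASS", os.path.dirname(sys.executable))
    else:
        base = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
    return os.path.join(base, name)

print(resource_path("eve.db"))
print(resource_path("cacert.pem"))
```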
83 dist_assets/win/pyfa_debug.spec (Normal file)
@@ -0,0 +1,83 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
# Note: This script is provided as-is for those who may be interested.
# pyfa does not currently fully support PyInstaller (or any other build process).
|
||||
|
||||
# Command line to build:
|
||||
# (Run from directory where pyfa.py and pyfa.spec lives.)
|
||||
# c:\Python27\scripts\pyinstaller.exe --clean --noconfirm --windowed --upx-dir=.\scripts\upx.exe pyfa.spec
|
||||
|
||||
# Don't forget to change the path to where your pyfa.py and pyfa.spec live
|
||||
# pathex=['C:\\Users\\Ebag333\\Documents\\GitHub\\Ebag333\\Pyfa'],
|
||||
|
||||
import os
|
||||
|
||||
block_cipher = None
|
||||
|
||||
added_files = [
|
||||
( 'imgs/gui/*.png', 'imgs/gui' ),
|
||||
( 'imgs/gui/*.gif', 'imgs/gui' ),
|
||||
( 'imgs/icons/*.png', 'imgs/icons' ),
|
||||
( 'imgs/renders/*.png', 'imgs/renders' ),
|
||||
( 'dist_assets/win/pyfa.ico', '.' ),
|
||||
( 'dist_assets/cacert.pem', '.' ),
|
||||
( 'eve.db', '.' ),
|
||||
( 'README.md', '.' ),
|
||||
( 'LICENSE', '.' ),
|
||||
]
|
||||
|
||||
import_these = []
|
||||
|
||||
# Walk eos.effects and add all effects so we can import them properly
|
||||
for root, folders, files in os.walk("eos/effects"):
|
||||
for file_ in files:
|
||||
if file_.endswith(".py") and not file_.startswith("_"):
|
||||
mod_name = "{}.{}".format(
|
||||
root.replace("/", "."),
|
||||
file_.split(".py")[0],
|
||||
)
|
||||
import_these.append(mod_name)
|
||||
|
||||
a = Analysis(
|
||||
['pyfa.py'],
|
||||
pathex=['C:\\projects\\pyfa\\'],
|
||||
binaries=[],
|
||||
datas=added_files,
|
||||
hiddenimports=import_these,
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
pyz = PYZ(
|
||||
a.pure,
|
||||
a.zipped_data,
|
||||
cipher=block_cipher,
|
||||
)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
exclude_binaries=True,
|
||||
debug=True,
|
||||
console=True,
|
||||
strip=False,
|
||||
upx=True,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
onefile=False,
|
||||
)
|
||||
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=True,
|
||||
onefile=False,
|
||||
name='pyfa_debug',
|
||||
icon='dist_assets/win/pyfa.ico',
|
||||
)
|
||||
45 dist_assets/win/version_resource.py (Normal file)
@@ -0,0 +1,45 @@
|
||||
# UTF-8
|
||||
#
|
||||
# For more details about fixed file info 'ffi' see:
|
||||
# http://msdn.microsoft.com/en-us/library/ms646997.aspx
|
||||
VSVersionInfo(
|
||||
ffi=FixedFileInfo(
|
||||
# filevers and prodvers should be always a tuple with four items: (1, 2, 3, 4)
|
||||
# Set not needed items to zero 0.
|
||||
filevers=(1, 15, 1, 0),
|
||||
prodvers=(1, 15, 1, 0),
|
||||
# Contains a bitmask that specifies the valid bits ('flags').
|
||||
mask=0x3f,
|
||||
# Contains a bitmask that specifies the Boolean attributes of the file.
|
||||
flags=0x0,
|
||||
# The operating system for which this file was designed.
|
||||
# 0x4 - NT and there is no need to change it.
|
||||
OS=0x40004,
|
||||
# The general type of file.
|
||||
# 0x1 - the file is an application.
|
||||
fileType=0x1,
|
||||
# The function of the file.
|
||||
# 0x0 - the function is not defined for this fileType
|
||||
subtype=0x0,
|
||||
# Creation date and time stamp.
|
||||
date=(0, 0)
|
||||
),
|
||||
kids=[
|
||||
StringFileInfo(
|
||||
[
|
||||
StringTable(
|
||||
u'040904E4',
|
||||
[StringStruct(u'LegalCopyright', u''),
|
||||
StringStruct(u'InternalName', u'pyfa.exe'),
|
||||
StringStruct(u'FileVersion', u'1.15.1.0'),
|
||||
StringStruct(u'CompanyName', u''),
|
||||
StringStruct(u'OriginalFilename', u'pyfa.exe'),
|
||||
StringStruct(u'ProductVersion', u'1.15.1.0'),
|
||||
StringStruct(u'FileDescription', u'Python fitting assistant'),
|
||||
StringStruct(u'LegalTrademarks', u''),
|
||||
StringStruct(u'Comments', u''),
|
||||
StringStruct(u'ProductName', u'pyfa')])
|
||||
]),
|
||||
VarFileInfo([VarStruct(u'Translation', [1033, 1252])])
|
||||
]
|
||||
)
|
||||
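The block above is a PyInstaller Windows version resource, stamped onto pyfa.exe at build time. None of the spec files shown earlier reference it directly, so presumably it is attached either through EXE(..., version=...) in a spec or with the --version-file flag when building from the script; the invocation below is an illustrative guess, not the project's documented build command:

```python
# Illustrative only: one way to stamp the version resource above onto a
# Windows build. The project's real builds go through the spec files shown
# earlier, where the equivalent would be the EXE(version=...) argument.
import subprocess
import sys

subprocess.run([
    sys.executable, "-m", "PyInstaller",
    "--name", "pyfa",
    "--icon", "dist_assets/win/pyfa.ico",
    "--version-file", "dist_assets/win/version_resource.py",
    "pyfa.py",
], check=True)
```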
@@ -1,8 +1,2 @@
|
||||
version = "0.2.3"
|
||||
tag = "git"
|
||||
|
||||
|
||||
def test():
|
||||
import tests.runTests
|
||||
import unittest
|
||||
unittest.main(defaultTest="discover", testLoader=tests.runTests.loader)
|
||||
version = "0.2.3"
|
||||
tag = "git"
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import heapq
|
||||
import time
|
||||
from math import sqrt, exp
|
||||
from functools import reduce
|
||||
|
||||
DAY = 24 * 60 * 60 * 1000
|
||||
|
||||
@@ -87,7 +88,7 @@ class CapSimulator(object):
|
||||
mods[(duration, capNeed, clipSize, disableStagger)] = 1
|
||||
|
||||
# Loop over grouped modules, configure staggering and push to the simulation state
|
||||
for (duration, capNeed, clipSize, disableStagger), amount in mods.iteritems():
|
||||
for (duration, capNeed, clipSize, disableStagger), amount in mods.items():
|
||||
if self.stagger and not disableStagger:
|
||||
if clipSize == 0:
|
||||
duration = int(duration / amount)
|
||||
@@ -192,7 +193,7 @@ class CapSimulator(object):
|
||||
|
||||
# calculate EVE's stability value
|
||||
try:
|
||||
avgDrain = reduce(float.__add__, map(lambda x: x[2] / x[1], self.state), 0.0)
|
||||
avgDrain = reduce(float.__add__, [x[2] / x[1] for x in self.state], 0.0)
|
||||
self.cap_stable_eve = 0.25 * (1.0 + sqrt(-(2.0 * avgDrain * tau - capCapacity) / capCapacity)) ** 2
|
||||
except ValueError:
|
||||
self.cap_stable_eve = 0.0
|
||||
|
||||
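The stability expression in the hunk above is EVE's closed-form estimate of the capacitor level a fit settles at; when the term under the square root goes negative the fit is cap-unstable and the ValueError branch reports 0. A worked numeric example of the same formula (all input numbers, including tau, are made up for illustration; the simulator derives tau elsewhere from the capacitor recharge time):

```python
# Worked example of the cap-stability formula from the hunk above.
# avgDrain sums capNeed/duration (GJ per ms) over all modules; values are
# illustrative, as is tau.
from math import sqrt

capCapacity = 5000.0      # GJ
tau = 100000.0            # recharge constant, derived elsewhere in the simulator
avgDrain = sum(capNeed / duration
               for capNeed, duration in [(20.0, 5000.0), (12.0, 4000.0)])

try:
    cap_stable_eve = 0.25 * (1.0 + sqrt(-(2.0 * avgDrain * tau - capCapacity) / capCapacity)) ** 2
except ValueError:
    cap_stable_eve = 0.0  # negative under the root means the fit is not cap stable

print("stable at {:.1%}".format(cap_stable_eve))   # prints "stable at 85.4%"
```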
@@ -1,15 +1,31 @@
|
||||
import sys
|
||||
from os.path import realpath, join, dirname, abspath
|
||||
|
||||
from logbook import Logger
|
||||
import os
|
||||
|
||||
istravis = os.environ.get('TRAVIS') == 'true'
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
debug = False
|
||||
gamedataCache = True
|
||||
saveddataCache = True
|
||||
gamedata_version = ""
|
||||
gamedata_connectionstring = 'sqlite:///' + unicode(realpath(join(dirname(abspath(__file__)), "..", "eve.db")),
|
||||
sys.getfilesystemencoding())
|
||||
saveddata_connectionstring = 'sqlite:///' + unicode(
|
||||
realpath(join(dirname(abspath(__file__)), "..", "saveddata", "saveddata.db")), sys.getfilesystemencoding())
|
||||
gamedata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "eve.db"))
|
||||
pyfalog.debug("Gamedata connection string: {0}", gamedata_connectionstring)
|
||||
|
||||
if istravis is True or hasattr(sys, '_called_from_test'):
|
||||
# Running in Travis. Run saveddata database in memory.
|
||||
saveddata_connectionstring = 'sqlite:///:memory:'
|
||||
else:
|
||||
saveddata_connectionstring = 'sqlite:///' + realpath(join(dirname(abspath(__file__)), "..", "saveddata", "saveddata-py3-db.db"))
|
||||
|
||||
pyfalog.debug("Saveddata connection string: {0}", saveddata_connectionstring)
|
||||
|
||||
settings = {
|
||||
"useStaticAdaptiveArmorHardener": False,
|
||||
"strictSkillLevels": True,
|
||||
}
|
||||
|
||||
# Autodetect path, only change if the autodetection bugs out.
|
||||
path = dirname(unicode(__file__, sys.getfilesystemencoding()))
|
||||
path = dirname(__file__)
|
||||
|
||||
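The connection strings assembled here (plus the ?check_same_thread=False suffix that config.py appends earlier in this compare) feed straight into SQLAlchemy engine creation in eos.db. A minimal sketch of what that amounts to; the explicit connect_args form below is standard SQLAlchemy and stands in for the query-string form eos uses, and the sessionmaker options mirror the ones visible in the eos.db hunk that follows:

```python
# Sketch: turning the connection strings above into engines and sessions.
# eos passes check_same_thread inside the URL query string; connect_args is
# the equivalent, explicit form.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

gamedata_engine = create_engine("sqlite:///eve.db",
                                connect_args={"check_same_thread": False})
saveddata_engine = create_engine("sqlite:///:memory:")   # the Travis/test branch

GamedataSession = sessionmaker(bind=gamedata_engine, autoflush=False,
                               expire_on_commit=False)
gamedata_session = GamedataSession()
```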
@@ -20,14 +20,16 @@
|
||||
import threading
|
||||
|
||||
from sqlalchemy import MetaData, create_engine
|
||||
from sqlalchemy.orm import sessionmaker, scoped_session
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
|
||||
import migration
|
||||
from . import migration
|
||||
from eos import config
|
||||
from logbook import Logger
|
||||
|
||||
pyfalog = Logger(__name__)
|
||||
pyfalog.info("Initializing database")
|
||||
pyfalog.info("Gamedata connection: {0}", config.gamedata_connectionstring)
|
||||
pyfalog.info("Saveddata connection: {0}", config.saveddata_connectionstring)
|
||||
|
||||
class ReadOnlyException(Exception):
|
||||
pass
|
||||
@@ -47,9 +49,11 @@ gamedata_session = sessionmaker(bind=gamedata_engine, autoflush=False, expire_on
|
||||
# game db because we haven't reached gamedata_meta.create_all()
|
||||
try:
|
||||
config.gamedata_version = gamedata_session.execute(
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
|
||||
"SELECT `field_value` FROM `metadata` WHERE `field_name` LIKE 'client_build'"
|
||||
).fetchone()[0]
|
||||
except:
|
||||
except Exception as e:
|
||||
pyfalog.warning("Missing gamedata version.")
|
||||
pyfalog.critical(e)
|
||||
config.gamedata_version = None
|
||||
|
||||
saveddata_connectionstring = config.saveddata_connectionstring
|
||||
@@ -62,23 +66,32 @@ if saveddata_connectionstring is not None:
|
||||
saveddata_meta = MetaData()
|
||||
saveddata_meta.bind = saveddata_engine
|
||||
saveddata_session = sessionmaker(bind=saveddata_engine, autoflush=False, expire_on_commit=False)()
|
||||
else:
|
||||
saveddata_meta = None
|
||||
|
||||
# Lock controlling any changes introduced to session
|
||||
sd_lock = threading.Lock()
|
||||
sd_lock = threading.RLock()
|
||||
|
||||
# Import all the definitions for all our database stuff
|
||||
from eos.db.gamedata import *
|
||||
from eos.db.saveddata import *
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata import alphaClones, attribute, category, effect, group, icon, item, marketGroup, metaData, metaGroup, queries, traits, unit
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata import booster, cargo, character, crest, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, \
|
||||
miscData, module, override, price, queries, skill, targetResists, user
|
||||
|
||||
# Import queries
|
||||
# noinspection PyPep8
|
||||
from eos.db.gamedata.queries import *
|
||||
# noinspection PyPep8
|
||||
from eos.db.saveddata.queries import *
|
||||
|
||||
# If using in memory saveddata, you'll want to reflect it so the data structure is good.
|
||||
if config.saveddata_connectionstring == "sqlite:///:memory:":
|
||||
saveddata_meta.create_all()
|
||||
pyfalog.info("Running database out of memory.")
|
||||
|
||||
|
||||
def rollback():
|
||||
with sd_lock:
|
||||
pyfalog.warning("Session rollback triggered.")
|
||||
saveddata_session.rollback()
|
||||
|
||||
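This hunk swaps the session lock from threading.Lock to threading.RLock. That matters because a re-entrant lock lets a thread that already holds it (say, inside a commit helper) call another helper such as rollback() that also takes it, where a plain Lock would deadlock. A tiny illustration with made-up function names:

```python
# Why RLock: the same thread may need to take the session lock twice.
import threading

sd_lock = threading.RLock()   # with threading.Lock() the nested call deadlocks

def rollback():
    with sd_lock:
        print("rollback")

def commit_or_rollback():
    with sd_lock:              # outer acquisition
        try:
            raise RuntimeError("simulated commit failure")
        except RuntimeError:
            rollback()         # re-acquires the same lock on the same thread

commit_or_rollback()
```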
@@ -21,26 +21,30 @@ from sqlalchemy import Column, String, Integer, Table, ForeignKey
|
||||
from sqlalchemy.orm import relation, mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import AlphaClone, AlphaCloneSkill
|
||||
from eos.gamedata import AlphaClone, AlphaCloneSkill
|
||||
|
||||
alphaclones_table = Table("alphaClones", gamedata_meta,
|
||||
Column("alphaCloneID", Integer, primary_key=True),
|
||||
Column("alphaCloneName", String),
|
||||
)
|
||||
alphaclones_table = Table(
|
||||
"alphaClones",
|
||||
gamedata_meta,
|
||||
Column("alphaCloneID", Integer, primary_key=True),
|
||||
Column("alphaCloneName", String),
|
||||
)
|
||||
|
||||
alphacloneskskills_table = Table("alphaCloneSkills", gamedata_meta,
|
||||
Column("alphaCloneID", Integer, ForeignKey("alphaClones.alphaCloneID"), primary_key=True),
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
Column("level", Integer),
|
||||
)
|
||||
alphacloneskskills_table = Table(
|
||||
"alphaCloneSkills",
|
||||
gamedata_meta,
|
||||
Column("alphaCloneID", Integer, ForeignKey("alphaClones.alphaCloneID"), primary_key=True),
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
Column("level", Integer),
|
||||
)
|
||||
|
||||
mapper(AlphaClone, alphaclones_table,
|
||||
properties={
|
||||
"ID": synonym("alphaCloneID"),
|
||||
"ID" : synonym("alphaCloneID"),
|
||||
"skills": relation(
|
||||
AlphaCloneSkill,
|
||||
cascade="all,delete-orphan",
|
||||
backref="clone")
|
||||
AlphaCloneSkill,
|
||||
cascade="all,delete-orphan",
|
||||
backref="clone")
|
||||
})
|
||||
|
||||
mapper(AlphaCloneSkill, alphacloneskskills_table)
|
||||
|
||||
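With the mapping above, each AlphaClone row exposes a skills relation (and each AlphaCloneSkill a clone backref). A hedged usage sketch against the gamedata session; the attribute names follow the columns and mapper properties shown above, and the snippet is illustrative rather than code from this change:

```python
# Sketch: reading alpha clone skill caps through the mapping above.
from eos.db import gamedata_session
from eos.gamedata import AlphaClone

for clone in gamedata_session.query(AlphaClone).all():
    print(clone.alphaCloneName)
    for skill in clone.skills:   # relation defined in the mapper above
        print("  type {} capped at level {}".format(skill.typeID, skill.level))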
@@ -22,7 +22,7 @@ from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Attribute, Icon, AttributeInfo, Unit
|
||||
from eos.gamedata import Attribute, AttributeInfo, Unit, Icon
|
||||
|
||||
typeattributes_table = Table("dgmtypeattribs", gamedata_meta,
|
||||
Column("value", Float),
|
||||
@@ -45,11 +45,13 @@ mapper(Attribute, typeattributes_table,
|
||||
properties={"info": relation(AttributeInfo, lazy=False)})
|
||||
|
||||
mapper(AttributeInfo, attributes_table,
|
||||
properties={"icon": relation(Icon),
|
||||
"unit": relation(Unit),
|
||||
"ID": synonym("attributeID"),
|
||||
"name": synonym("attributeName"),
|
||||
"description": deferred(attributes_table.c.description)})
|
||||
properties={
|
||||
"icon" : relation(Icon),
|
||||
"unit" : relation(Unit),
|
||||
"ID" : synonym("attributeID"),
|
||||
"name" : synonym("attributeName"),
|
||||
"description": deferred(attributes_table.c.description)
|
||||
})
|
||||
|
||||
Attribute.ID = association_proxy("info", "attributeID")
|
||||
Attribute.name = association_proxy("info", "attributeName")
|
||||
|
||||
@@ -21,7 +21,7 @@ from sqlalchemy import Column, String, Integer, ForeignKey, Boolean, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Category, Icon
|
||||
from eos.gamedata import Category, Icon
|
||||
|
||||
categories_table = Table("invcategories", gamedata_meta,
|
||||
Column("categoryID", Integer, primary_key=True),
|
||||
@@ -31,7 +31,9 @@ categories_table = Table("invcategories", gamedata_meta,
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Category, categories_table,
|
||||
properties={"icon": relation(Icon),
|
||||
"ID": synonym("categoryID"),
|
||||
"name": synonym("categoryName"),
|
||||
"description": deferred(categories_table.c.description)})
|
||||
properties={
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("categoryID"),
|
||||
"name" : synonym("categoryName"),
|
||||
"description": deferred(categories_table.c.description)
|
||||
})
|
||||
|
||||
@@ -18,11 +18,10 @@
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, String, Integer, Boolean, Table, ForeignKey
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import mapper, synonym, relation, deferred
|
||||
from sqlalchemy.orm import mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Effect, EffectInfo
|
||||
from eos.gamedata import Effect, ItemEffect
|
||||
|
||||
typeeffects_table = Table("dgmtypeeffects", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True, index=True),
|
||||
@@ -34,17 +33,14 @@ effects_table = Table("dgmeffects", gamedata_meta,
|
||||
Column("description", String),
|
||||
Column("published", Boolean),
|
||||
Column("isAssistance", Boolean),
|
||||
Column("isOffensive", Boolean))
|
||||
Column("isOffensive", Boolean),
|
||||
Column("resistanceID", Integer))
|
||||
|
||||
mapper(EffectInfo, effects_table,
|
||||
properties={"ID": synonym("effectID"),
|
||||
"name": synonym("effectName"),
|
||||
"description": deferred(effects_table.c.description)})
|
||||
mapper(Effect, effects_table,
|
||||
properties={
|
||||
"ID" : synonym("effectID"),
|
||||
"name" : synonym("effectName"),
|
||||
"description": deferred(effects_table.c.description)
|
||||
})
|
||||
|
||||
mapper(Effect, typeeffects_table,
|
||||
properties={"ID": synonym("effectID"),
|
||||
"info": relation(EffectInfo, lazy=False)})
|
||||
|
||||
Effect.name = association_proxy("info", "name")
|
||||
Effect.description = association_proxy("info", "description")
|
||||
Effect.published = association_proxy("info", "published")
|
||||
mapper(ItemEffect, typeeffects_table)
|
||||
|
||||
@@ -21,7 +21,7 @@ from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Group, Icon, Category
|
||||
from eos.gamedata import Category, Group, Icon
|
||||
|
||||
groups_table = Table("invgroups", gamedata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
@@ -32,8 +32,10 @@ groups_table = Table("invgroups", gamedata_meta,
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(Group, groups_table,
|
||||
properties={"category": relation(Category, backref="groups"),
|
||||
"icon": relation(Icon),
|
||||
"ID": synonym("groupID"),
|
||||
"name": synonym("groupName"),
|
||||
"description": deferred(groups_table.c.description)})
|
||||
properties={
|
||||
"category" : relation(Category, backref="groups"),
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("groupID"),
|
||||
"name" : synonym("groupName"),
|
||||
"description": deferred(groups_table.c.description)
|
||||
})
|
||||
|
||||
@@ -21,7 +21,7 @@ from sqlalchemy import Column, String, Integer, Table
|
||||
from sqlalchemy.orm import mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Icon
|
||||
from eos.gamedata import Icon
|
||||
|
||||
icons_table = Table("icons", gamedata_meta,
|
||||
Column("iconID", Integer, primary_key=True),
|
||||
@@ -29,5 +29,7 @@ icons_table = Table("icons", gamedata_meta,
|
||||
Column("iconFile", String))
|
||||
|
||||
mapper(Icon, icons_table,
|
||||
properties={"ID": synonym("iconID"),
|
||||
"description": deferred(icons_table.c.description)})
|
||||
properties={
|
||||
"ID" : synonym("iconID"),
|
||||
"description": deferred(icons_table.c.description)
|
||||
})
|
||||
|
||||
@@ -21,9 +21,10 @@ from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table, Floa
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from eos.db.gamedata.effect import typeeffects_table
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Icon, Attribute, Item, Effect, MetaType, Group, Traits
|
||||
from eos.gamedata import Attribute, Effect, Group, Icon, Item, MetaType, Traits
|
||||
|
||||
items_table = Table("invtypes", gamedata_meta,
|
||||
Column("typeID", Integer, primary_key=True),
|
||||
@@ -39,23 +40,24 @@ items_table = Table("invtypes", gamedata_meta,
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")),
|
||||
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True))
|
||||
|
||||
from .metaGroup import metatypes_table # noqa
|
||||
from .traits import traits_table # noqa
|
||||
from .metaGroup import metatypes_table # noqa
|
||||
from .traits import traits_table # noqa
|
||||
|
||||
mapper(Item, items_table,
|
||||
properties={"group": relation(Group, backref="items"),
|
||||
"icon": relation(Icon),
|
||||
"_Item__attributes": relation(Attribute, collection_class=attribute_mapped_collection('name')),
|
||||
"effects": relation(Effect, collection_class=attribute_mapped_collection('name')),
|
||||
"metaGroup": relation(MetaType,
|
||||
properties={
|
||||
"group" : relation(Group, backref="items"),
|
||||
"icon" : relation(Icon),
|
||||
"_Item__attributes": relation(Attribute, cascade='all, delete, delete-orphan', collection_class=attribute_mapped_collection('name')),
|
||||
"effects": relation(Effect, secondary=typeeffects_table, collection_class=attribute_mapped_collection('name')),
|
||||
"metaGroup" : relation(MetaType,
|
||||
primaryjoin=metatypes_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False),
|
||||
"ID": synonym("typeID"),
|
||||
"name": synonym("typeName"),
|
||||
"description": deferred(items_table.c.description),
|
||||
"traits": relation(Traits,
|
||||
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False)
|
||||
})
|
||||
"ID" : synonym("typeID"),
|
||||
"name" : synonym("typeName"),
|
||||
"description" : deferred(items_table.c.description),
|
||||
"traits" : relation(Traits,
|
||||
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
|
||||
uselist=False)
|
||||
})
|
||||
|
||||
Item.category = association_proxy("group", "category")
|
||||
|
||||
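Item.category here is an association_proxy, so item.category resolves transparently through item.group.category without a separate mapped attribute on Item. A short usage sketch (illustrative; the item name is just an example and the query style follows the helpers this diff touches elsewhere):

```python
# Sketch: the association proxy added above in action.
from eos.db import gamedata_session
from eos.gamedata import Item

item = gamedata_session.query(Item).filter(Item.name == "Drake").first()
if item is not None:
    print(item.group.name)       # the plain relation path
    print(item.category.name)    # association_proxy: item.group.category
```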
@@ -21,7 +21,7 @@ from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
|
||||
from sqlalchemy.orm import relation, mapper, synonym, deferred
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Item, MarketGroup, Icon
|
||||
from eos.gamedata import Icon, Item, MarketGroup
|
||||
|
||||
marketgroups_table = Table("invmarketgroups", gamedata_meta,
|
||||
Column("marketGroupID", Integer, primary_key=True),
|
||||
@@ -33,10 +33,12 @@ marketgroups_table = Table("invmarketgroups", gamedata_meta,
|
||||
Column("iconID", Integer, ForeignKey("icons.iconID")))
|
||||
|
||||
mapper(MarketGroup, marketgroups_table,
|
||||
properties={"items": relation(Item, backref="marketGroup"),
|
||||
"parent": relation(MarketGroup, backref="children",
|
||||
remote_side=[marketgroups_table.c.marketGroupID]),
|
||||
"icon": relation(Icon),
|
||||
"ID": synonym("marketGroupID"),
|
||||
"name": synonym("marketGroupName"),
|
||||
"description": deferred(marketgroups_table.c.description)})
|
||||
properties={
|
||||
"items" : relation(Item, backref="marketGroup"),
|
||||
"parent" : relation(MarketGroup, backref="children",
|
||||
remote_side=[marketgroups_table.c.marketGroupID]),
|
||||
"icon" : relation(Icon),
|
||||
"ID" : synonym("marketGroupID"),
|
||||
"name" : synonym("marketGroupName"),
|
||||
"description": deferred(marketgroups_table.c.description)
|
||||
})
|
||||
|
||||
@@ -1,30 +1,30 @@
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Table, String
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import MetaData
|
||||
|
||||
metadata_table = Table("metadata", gamedata_meta,
|
||||
Column("field_name", String, primary_key=True),
|
||||
Column("field_value", String))
|
||||
|
||||
mapper(MetaData, metadata_table)
|
||||
# ===============================================================================
|
||||
# Copyright (C) 2010 Diego Duclos
|
||||
#
|
||||
# This file is part of eos.
|
||||
#
|
||||
# eos is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# eos is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Column, Table, String
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.gamedata import MetaData
|
||||
|
||||
metadata_table = Table("metadata", gamedata_meta,
|
||||
Column("field_name", String, primary_key=True),
|
||||
Column("field_value", String))
|
||||
|
||||
mapper(MetaData, metadata_table)
|
||||
|
||||
@@ -23,7 +23,7 @@ from sqlalchemy.orm import relation, mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.db.gamedata.item import items_table
|
||||
from eos.types import MetaGroup, Item, MetaType
|
||||
from eos.gamedata import Item, MetaGroup, MetaType
|
||||
|
||||
metagroups_table = Table("invmetagroups", gamedata_meta,
|
||||
Column("metaGroupID", Integer, primary_key=True),
|
||||
@@ -35,13 +35,17 @@ metatypes_table = Table("invmetatypes", gamedata_meta,
|
||||
Column("metaGroupID", Integer, ForeignKey("invmetagroups.metaGroupID")))
|
||||
|
||||
mapper(MetaGroup, metagroups_table,
|
||||
properties={"ID": synonym("metaGroupID"),
|
||||
"name": synonym("metaGroupName")})
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"name": synonym("metaGroupName")
|
||||
})
|
||||
|
||||
mapper(MetaType, metatypes_table,
|
||||
properties={"ID": synonym("metaGroupID"),
|
||||
"parent": relation(Item, primaryjoin=metatypes_table.c.parentTypeID == items_table.c.typeID),
|
||||
"items": relation(Item, primaryjoin=metatypes_table.c.typeID == items_table.c.typeID),
|
||||
"info": relation(MetaGroup, lazy=False)})
|
||||
properties={
|
||||
"ID" : synonym("metaGroupID"),
|
||||
"parent": relation(Item, primaryjoin=metatypes_table.c.parentTypeID == items_table.c.typeID),
|
||||
"items" : relation(Item, primaryjoin=metatypes_table.c.typeID == items_table.c.typeID),
|
||||
"info" : relation(MetaGroup, lazy=False)
|
||||
})
|
||||
|
||||
MetaType.name = association_proxy("info", "name")
|
||||
|
||||
@@ -17,24 +17,21 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy.orm import join, exc
|
||||
from sqlalchemy.orm import join, exc, aliased
|
||||
from sqlalchemy.sql import and_, or_, select
|
||||
|
||||
import eos.config
|
||||
# TODO: Unsure which item the code below needs :(
|
||||
# from eos.gamedata import Item
|
||||
from eos.gamedata import Attribute
|
||||
from eos.db import gamedata_session
|
||||
from eos.db.gamedata.metaGroup import metatypes_table, items_table
|
||||
from eos.db.gamedata.group import groups_table
|
||||
from eos.db.util import processEager, processWhere
|
||||
from eos.types import Item, Category, Group, MarketGroup, AttributeInfo, MetaData, MetaGroup, AlphaClone
|
||||
from eos.gamedata import AlphaClone, Attribute, Category, Group, Item, MarketGroup, MetaGroup, AttributeInfo, MetaData
|
||||
|
||||
cache = {}
|
||||
configVal = getattr(eos.config, "gamedataCache", None)
|
||||
if configVal is True:
|
||||
def cachedQuery(amount, *keywords):
|
||||
def deco(function):
|
||||
cache = {}
|
||||
|
||||
def checkAndReturn(*args, **kwargs):
|
||||
useCache = kwargs.pop("useCache", True)
|
||||
cacheKey = []
|
||||
@@ -84,7 +81,7 @@ def getItem(lookfor, eager=None):
|
||||
item = gamedata_session.query(Item).get(lookfor)
|
||||
else:
|
||||
item = gamedata_session.query(Item).options(*processEager(eager)).filter(Item.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
elif isinstance(lookfor, str):
|
||||
if lookfor in itemNameMap:
|
||||
id = itemNameMap[lookfor]
|
||||
if eager is None:
|
||||
@@ -100,6 +97,35 @@ def getItem(lookfor, eager=None):
|
||||
return item
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getItems(lookfor, eager=None):
|
||||
"""
|
||||
Gets a list of items. Does a bit of cache hackery to get this working properly: the cache
is normally keyed on the function call's parameters, so here we read from and write to it directly.
Works well enough. Not currently used, but kept for possible future inclusion.
|
||||
"""
|
||||
|
||||
toGet = []
|
||||
results = []
|
||||
|
||||
for id in lookfor:
|
||||
if (id, None) in cache:
|
||||
results.append(cache.get((id, None)))
|
||||
else:
|
||||
toGet.append(id)
|
||||
|
||||
if len(toGet) > 0:
|
||||
# Get items that aren't currently cached, and store them in the cache
|
||||
items = gamedata_session.query(Item).filter(Item.ID.in_(toGet)).all()
|
||||
for item in items:
|
||||
cache[(item.ID, None)] = item
|
||||
results += items
|
||||
|
||||
# sort the results based on the original indexing
|
||||
results.sort(key=lambda x: lookfor.index(x.ID))
|
||||
return results
|
||||
|
||||
|
||||
@cachedQuery(1, "lookfor")
|
||||
def getAlphaClone(lookfor, eager=None):
|
||||
if isinstance(lookfor, int):
|
||||
@@ -128,7 +154,7 @@ def getGroup(lookfor, eager=None):
|
||||
group = gamedata_session.query(Group).get(lookfor)
|
||||
else:
|
||||
group = gamedata_session.query(Group).options(*processEager(eager)).filter(Group.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
elif isinstance(lookfor, str):
|
||||
if lookfor in groupNameMap:
|
||||
id = groupNameMap[lookfor]
|
||||
if eager is None:
|
||||
@@ -154,19 +180,19 @@ def getCategory(lookfor, eager=None):
|
||||
category = gamedata_session.query(Category).get(lookfor)
|
||||
else:
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
Category.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
if lookfor in categoryNameMap:
|
||||
id = categoryNameMap[lookfor]
|
||||
if eager is None:
|
||||
category = gamedata_session.query(Category).get(id)
|
||||
else:
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.ID == id).first()
|
||||
Category.ID == id).first()
|
||||
else:
|
||||
# Category names are unique, so we can use first() instead of one()
|
||||
category = gamedata_session.query(Category).options(*processEager(eager)).filter(
|
||||
Category.name == lookfor).first()
|
||||
Category.name == lookfor).first()
|
||||
categoryNameMap[lookfor] = category.ID
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
@@ -183,19 +209,19 @@ def getMetaGroup(lookfor, eager=None):
|
||||
metaGroup = gamedata_session.query(MetaGroup).get(lookfor)
|
||||
else:
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.ID == lookfor).first()
|
||||
elif isinstance(lookfor, basestring):
|
||||
MetaGroup.ID == lookfor).first()
|
||||
elif isinstance(lookfor, str):
|
||||
if lookfor in metaGroupNameMap:
|
||||
id = metaGroupNameMap[lookfor]
|
||||
if eager is None:
|
||||
metaGroup = gamedata_session.query(MetaGroup).get(id)
|
||||
else:
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.ID == id).first()
|
||||
MetaGroup.ID == id).first()
|
||||
else:
|
||||
# MetaGroup names are unique, so we can use first() instead of one()
|
||||
metaGroup = gamedata_session.query(MetaGroup).options(*processEager(eager)).filter(
|
||||
MetaGroup.name == lookfor).first()
|
||||
MetaGroup.name == lookfor).first()
|
||||
metaGroupNameMap[lookfor] = metaGroup.ID
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
@@ -209,7 +235,7 @@ def getMarketGroup(lookfor, eager=None):
|
||||
marketGroup = gamedata_session.query(MarketGroup).get(lookfor)
|
||||
else:
|
||||
marketGroup = gamedata_session.query(MarketGroup).options(*processEager(eager)).filter(
|
||||
MarketGroup.ID == lookfor).first()
|
||||
MarketGroup.ID == lookfor).first()
|
||||
else:
|
||||
raise TypeError("Need integer as argument")
|
||||
return marketGroup
|
||||
@@ -219,19 +245,19 @@ def getMarketGroup(lookfor, eager=None):
|
||||
def getItemsByCategory(filter, where=None, eager=None):
|
||||
if isinstance(filter, int):
|
||||
filter = Category.ID == filter
|
||||
elif isinstance(filter, basestring):
|
||||
elif isinstance(filter, str):
|
||||
filter = Category.name == filter
|
||||
else:
|
||||
raise TypeError("Need integer or string as argument")
|
||||
|
||||
filter = processWhere(filter, where)
|
||||
return gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category).filter(
|
||||
filter).all()
|
||||
filter).all()
|
||||
|
||||
|
||||
@cachedQuery(3, "where", "nameLike", "join")
|
||||
def searchItems(nameLike, where=None, join=None, eager=None):
|
||||
if not isinstance(nameLike, basestring):
|
||||
if not isinstance(nameLike, str):
|
||||
raise TypeError("Need string as argument")
|
||||
|
||||
if join is None:
|
||||
@@ -242,7 +268,7 @@ def searchItems(nameLike, where=None, join=None, eager=None):
|
||||
|
||||
items = gamedata_session.query(Item).options(*processEager(eager)).join(*join)
|
||||
for token in nameLike.split(' '):
|
||||
token_safe = u"%{0}%".format(sqlizeString(token))
|
||||
token_safe = "%{0}%".format(sqlizeString(token))
|
||||
if where is not None:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), where))
|
||||
else:
|
||||
@@ -251,8 +277,24 @@ def searchItems(nameLike, where=None, join=None, eager=None):
|
||||
return items
|
||||
|
||||
|
||||
@cachedQuery(3, "where", "nameLike", "join")
|
||||
def searchSkills(nameLike, where=None, eager=None):
|
||||
if not isinstance(nameLike, str):
|
||||
raise TypeError("Need string as argument")
|
||||
|
||||
items = gamedata_session.query(Item).options(*processEager(eager)).join(Item.group, Group.category)
|
||||
for token in nameLike.split(' '):
|
||||
token_safe = "%{0}%".format(sqlizeString(token))
|
||||
if where is not None:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), Category.ID == 16, where))
|
||||
else:
|
||||
items = items.filter(and_(Item.name.like(token_safe, escape="\\"), Category.ID == 16))
|
||||
items = items.limit(100).all()
|
||||
return items
|
||||
|
||||
|
||||
@cachedQuery(2, "where", "itemids")
|
||||
def getVariations(itemids, where=None, eager=None):
|
||||
def getVariations(itemids, groupIDs=None, where=None, eager=None):
|
||||
for itemid in itemids:
|
||||
if not isinstance(itemid, int):
|
||||
raise TypeError("All passed item IDs must be integers")
|
||||
@@ -264,13 +306,23 @@ def getVariations(itemids, where=None, eager=None):
|
||||
filter = processWhere(itemfilter, where)
|
||||
joinon = items_table.c.typeID == metatypes_table.c.typeID
|
||||
vars = gamedata_session.query(Item).options(*processEager(eager)).join((metatypes_table, joinon)).filter(
|
||||
filter).all()
|
||||
filter).all()
|
||||
|
||||
if vars:
|
||||
return vars
|
||||
elif groupIDs:
|
||||
itemfilter = or_(*(groups_table.c.groupID == groupID for groupID in groupIDs))
|
||||
filter = processWhere(itemfilter, where)
|
||||
joinon = items_table.c.groupID == groups_table.c.groupID
|
||||
vars = gamedata_session.query(Item).options(*processEager(eager)).join((groups_table, joinon)).filter(
|
||||
filter).all()
|
||||
|
||||
return vars
|
||||
|
||||
|
||||
@cachedQuery(1, "attr")
|
||||
def getAttributeInfo(attr, eager=None):
|
||||
if isinstance(attr, basestring):
|
||||
if isinstance(attr, str):
|
||||
filter = AttributeInfo.name == attr
|
||||
elif isinstance(attr, int):
|
||||
filter = AttributeInfo.ID == attr
|
||||
@@ -285,7 +337,7 @@ def getAttributeInfo(attr, eager=None):
|
||||
|
||||
@cachedQuery(1, "field")
|
||||
def getMetaData(field):
|
||||
if isinstance(field, basestring):
|
||||
if isinstance(field, str):
|
||||
data = gamedata_session.query(MetaData).get(field)
|
||||
else:
|
||||
raise TypeError("Need string as argument")
|
||||
@@ -307,3 +359,25 @@ def directAttributeRequest(itemIDs, attrIDs):
|
||||
|
||||
result = gamedata_session.execute(q).fetchall()
|
||||
return result
|
||||
|
||||
|
||||
def getRequiredFor(itemID, attrMapping):
|
||||
Attribute1 = aliased(Attribute)
|
||||
Attribute2 = aliased(Attribute)
|
||||
|
||||
skillToLevelClauses = []
|
||||
|
||||
for attrSkill, attrLevel in attrMapping.items():
|
||||
skillToLevelClauses.append(and_(Attribute1.attributeID == attrSkill, Attribute2.attributeID == attrLevel))
|
||||
|
||||
queryOr = or_(*skillToLevelClauses)
|
||||
|
||||
q = select((Attribute2.typeID, Attribute2.value),
|
||||
and_(Attribute1.value == itemID, queryOr),
|
||||
from_obj=[
|
||||
join(Attribute1, Attribute2, Attribute1.typeID == Attribute2.typeID)
|
||||
])
|
||||
|
||||
result = gamedata_session.execute(q).fetchall()
|
||||
|
||||
return result
|
||||
|
||||
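getRequiredFor pairs a "required skill" attribute with its matching "required level" attribute and returns (typeID, level) rows for every type that requires the given skill. A hedged call example; the module path is inferred from the other imports in this hunk, and the attribute-ID pairs are the conventional requiredSkillN to requiredSkillNLevel mapping, which this diff does not itself define:

```python
# Sketch: list everything that requires a given skill, and at what level.
# The attribute-ID mapping and the skill typeID below are assumptions used
# for illustration only.
from eos.db.gamedata.queries import getRequiredFor

REQUIRED_SKILL_ATTRS = {182: 277, 183: 278, 184: 279}
some_skill_typeID = 3319   # illustrative skill ID

for typeID, level in getRequiredFor(some_skill_typeID, REQUIRED_SKILL_ATTRS):
    print("type {} needs level {:.0f}".format(typeID, level))
```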
@@ -2,7 +2,7 @@ from sqlalchemy import Column, Table, Integer, String, ForeignKey
|
||||
from sqlalchemy.orm import mapper
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Traits
|
||||
from eos.gamedata import Traits
|
||||
|
||||
traits_table = Table("invtraits", gamedata_meta,
|
||||
Column("typeID", Integer, ForeignKey("invtypes.typeID"), primary_key=True),
|
||||
|
||||
@@ -21,7 +21,7 @@ from sqlalchemy import Column, Table, Integer, String
|
||||
from sqlalchemy.orm import mapper, synonym
|
||||
|
||||
from eos.db import gamedata_meta
|
||||
from eos.types import Unit
|
||||
from eos.gamedata import Unit
|
||||
|
||||
groups_table = Table("dgmunits", gamedata_meta,
|
||||
Column("unitID", Integer, primary_key=True),
|
||||
@@ -29,5 +29,7 @@ groups_table = Table("dgmunits", gamedata_meta,
|
||||
Column("displayName", String))
|
||||
|
||||
mapper(Unit, groups_table,
|
||||
properties={"ID": synonym("unitID"),
|
||||
"name": synonym("unitName")})
|
||||
properties={
|
||||
"ID" : synonym("unitID"),
|
||||
"name": synonym("unitName")
|
||||
})
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import logging
|
||||
from logbook import Logger
|
||||
import shutil
|
||||
import time
|
||||
|
||||
import config
|
||||
import migrations
|
||||
from . import migrations
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
pyfalog = Logger(__name__)
|
||||
|
||||
|
||||
def getVersion(db):
|
||||
@@ -34,10 +34,10 @@ def update(saveddata_engine):
|
||||
|
||||
shutil.copyfile(config.saveDB, toFile)
|
||||
|
||||
for version in xrange(dbVersion, appVersion):
|
||||
for version in range(dbVersion, appVersion):
|
||||
func = migrations.updates[version + 1]
|
||||
if func:
|
||||
logger.info("Applying database update: %d", version + 1)
|
||||
pyfalog.info("Applying database update: {0}", version + 1)
|
||||
func(saveddata_engine)
|
||||
|
||||
# when all is said and done, set version to current
|
||||
|
||||
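The loop above walks every pending schema version and calls its registered update function; the migrations package hunk that follows is what fills the updates dict. A stripped-down model of that mechanism, with illustrative names, versions and a string standing in for the engine:

```python
# Sketch of the version walk performed by update(): apply every registered
# migration between the database's version and the application's version.
updates = {
    2: lambda engine: print("migrate to 2 on", engine),
    3: None,                       # a version with no work to do
    4: lambda engine: print("migrate to 4 on", engine),
}
appVersion = max(updates)

def update(saveddata_engine, dbVersion):
    for version in range(dbVersion, appVersion):
        func = updates.get(version + 1)
        if func:
            func(saveddata_engine)

update("sqlite:///saveddata.db", dbVersion=1)
```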
@@ -15,7 +15,27 @@ updates = {}
|
||||
appVersion = 0
|
||||
|
||||
prefix = __name__ + "."
|
||||
for importer, modname, ispkg in pkgutil.iter_modules(__path__, prefix):
|
||||
|
||||
# load modules in a way that works both with and without PyInstaller
|
||||
# from: https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py
|
||||
# see: https://github.com/pyinstaller/pyinstaller/issues/1905
|
||||
|
||||
# load modules using iter_modules()
|
||||
# (finds all migration modules in a normal build, but not under PyInstaller)
|
||||
module_names = [m[1] for m in pkgutil.iter_modules(__path__, prefix)]
|
||||
|
||||
# special handling for PyInstaller
|
||||
importers = map(pkgutil.get_importer, __path__)
|
||||
toc = set()
|
||||
for i in importers:
|
||||
if hasattr(i, 'toc'):
|
||||
toc |= i.toc
|
||||
|
||||
for elm in toc:
|
||||
if elm.startswith(prefix):
|
||||
module_names.append(elm)
|
||||
|
||||
for modname in module_names:
|
||||
# loop through python files, extracting update number and function, and
|
||||
# adding it to a list
|
||||
modname_tail = modname.rsplit('.', 1)[-1]
|
||||
|
||||
@@ -14,32 +14,32 @@ Migration 1
|
||||
import sqlalchemy
|
||||
|
||||
CONVERSIONS = {
|
||||
6135: [ # Scoped Cargo Scanner
|
||||
6135 : [ # Scoped Cargo Scanner
|
||||
6133, # Interior Type-E Cargo Identifier
|
||||
],
|
||||
6527: [ # Compact Ship Scanner
|
||||
6527 : [ # Compact Ship Scanner
|
||||
6525, # Ta3 Perfunctory Vessel Probe
|
||||
6529, # Speculative Ship Identifier I
|
||||
6531, # Practical Type-E Ship Probe
|
||||
],
|
||||
6569: [ # Scoped Survey Scanner
|
||||
6569 : [ # Scoped Survey Scanner
|
||||
6567, # ML-3 Amphilotite Mining Probe
|
||||
6571, # Rock-Scanning Sensor Array I
|
||||
6573, # 'Dactyl' Type-E Asteroid Analyzer
|
||||
],
|
||||
509: [ # 'Basic' Capacitor Flux Coil
|
||||
509 : [ # 'Basic' Capacitor Flux Coil
|
||||
8163, # Partial Power Plant Manager: Capacitor Flux
|
||||
8165, # Alpha Reactor Control: Capacitor Flux
|
||||
8167, # Type-E Power Core Modification: Capacitor Flux
|
||||
8169, # Marked Generator Refitting: Capacitor Flux
|
||||
],
|
||||
8135: [ # Restrained Capacitor Flux Coil
|
||||
8135 : [ # Restrained Capacitor Flux Coil
|
||||
8131, # Local Power Plant Manager: Capacitor Flux I
|
||||
],
|
||||
8133: [ # Compact Capacitor Flux Coil
|
||||
8133 : [ # Compact Capacitor Flux Coil
|
||||
8137, # Mark I Generator Refitting: Capacitor Flux
|
||||
],
|
||||
3469: [ # Basic Co-Processor
|
||||
3469 : [ # Basic Co-Processor
|
||||
8744, # Nanoelectrical Co-Processor
|
||||
8743, # Nanomechanical CPU Enhancer
|
||||
8746, # Quantum Co-Processor
|
||||
@@ -47,17 +47,17 @@ CONVERSIONS = {
|
||||
15425, # Naiyon's Modified Co-Processor (never existed but convert
|
||||
# anyway as some fits may include it)
|
||||
],
|
||||
8748: [ # Upgraded Co-Processor
|
||||
8748 : [ # Upgraded Co-Processor
|
||||
8747, # Nanomechanical CPU Enhancer I
|
||||
8750, # Quantum Co-Processor I
|
||||
8749, # Photonic CPU Enhancer I
|
||||
],
|
||||
1351: [ # Basic Reactor Control Unit
|
||||
1351 : [ # Basic Reactor Control Unit
|
||||
8251, # Partial Power Plant Manager: Reaction Control
|
||||
8253, # Alpha Reactor Control: Reaction Control
|
||||
8257, # Marked Generator Refitting: Reaction Control
|
||||
],
|
||||
8263: [ # Compact Reactor Control Unit
|
||||
8263 : [ # Compact Reactor Control Unit
|
||||
8259, # Local Power Plant Manager: Reaction Control I
|
||||
8265, # Mark I Generator Refitting: Reaction Control
|
||||
8261, # Beta Reactor Control: Reaction Control I
|
||||
@@ -69,15 +69,15 @@ CONVERSIONS = {
|
||||
31936: [ # Navy Micro Auxiliary Power Core
|
||||
16543, # Micro 'Vigor' Core Augmentation
|
||||
],
|
||||
8089: [ # Compact Light Missile Launcher
|
||||
8089 : [ # Compact Light Missile Launcher
|
||||
8093, # Prototype 'Arbalest' Light Missile Launcher
|
||||
],
|
||||
8091: [ # Ample Light Missile Launcher
|
||||
8091 : [ # Ample Light Missile Launcher
|
||||
7993, # Experimental TE-2100 Light Missile Launcher
|
||||
],
|
||||
# Surface Cargo Scanner I was removed from the game, but the patch notes do not
# mention a replacement module. Morphing it to the meta 0 module to be safe.
|
||||
442: [ # Cargo Scanner I
|
||||
442 : [ # Cargo Scanner I
|
||||
6129, # Surface Cargo Scanner I
|
||||
]
|
||||
}
|
||||
@@ -91,7 +91,7 @@ def upgrade(saveddata_engine):
|
||||
saveddata_engine.execute("ALTER TABLE fits ADD COLUMN targetResistsID INTEGER;")
|
||||
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.iteritems():
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
@@ -14,7 +14,7 @@ CONVERSIONS = {
|
||||
22947: ( # 'Beatnik' Small Remote Armor Repairer
|
||||
23414, # 'Brotherhood' Small Remote Armor Repairer
|
||||
),
|
||||
8295: ( # Type-D Restrained Shield Flux Coil
|
||||
8295 : ( # Type-D Restrained Shield Flux Coil
|
||||
8293, # Beta Reactor Control: Shield Flux I
|
||||
),
|
||||
16499: ( # Heavy Knave Scoped Energy Nosferatu
|
||||
@@ -29,13 +29,13 @@ CONVERSIONS = {
|
||||
16447: ( # Medium Solace Scoped Remote Armor Repairer
|
||||
16445, # Medium 'Arup' Remote Armor Repairer
|
||||
),
|
||||
508: ( # 'Basic' Shield Flux Coil
|
||||
508 : ( # 'Basic' Shield Flux Coil
|
||||
8325, # Alpha Reactor Shield Flux
|
||||
8329, # Marked Generator Refitting: Shield Flux
|
||||
8323, # Partial Power Plant Manager: Shield Flux
|
||||
8327, # Type-E Power Core Modification: Shield Flux
|
||||
),
|
||||
1419: ( # 'Basic' Shield Power Relay
|
||||
1419 : ( # 'Basic' Shield Power Relay
|
||||
8341, # Alpha Reactor Shield Power Relay
|
||||
8345, # Marked Generator Refitting: Shield Power Relay
|
||||
8339, # Partial Power Plant Manager: Shield Power Relay
|
||||
@@ -47,57 +47,57 @@ CONVERSIONS = {
|
||||
16505: ( # Medium Ghoul Compact Energy Nosferatu
|
||||
16511, # Medium Diminishing Power System Drain I
|
||||
),
|
||||
8297: ( # Mark I Compact Shield Flux Coil
|
||||
8297 : ( # Mark I Compact Shield Flux Coil
|
||||
8291, # Local Power Plant Manager: Reaction Shield Flux I
|
||||
),
|
||||
16455: ( # Large Solace Scoped Remote Armor Repairer
|
||||
16453, # Large 'Arup' Remote Armor Repairer
|
||||
),
|
||||
6485: ( # M51 Benefactor Compact Shield Recharger
|
||||
6485 : ( # M51 Benefactor Compact Shield Recharger
|
||||
6491, # Passive Barrier Compensator I
|
||||
6489, # 'Benefactor' Ward Reconstructor
|
||||
6487, # Supplemental Screen Generator I
|
||||
),
|
||||
5137: ( # Small Knave Scoped Energy Nosferatu
|
||||
5137 : ( # Small Knave Scoped Energy Nosferatu
|
||||
5135, # E5 Prototype Energy Vampire
|
||||
),
|
||||
8579: ( # Medium Murky Compact Remote Shield Booster
|
||||
8579 : ( # Medium Murky Compact Remote Shield Booster
|
||||
8581, # Medium 'Atonement' Remote Shield Booster
|
||||
),
|
||||
8531: ( # Small Murky Compact Remote Shield Booster
|
||||
8531 : ( # Small Murky Compact Remote Shield Booster
|
||||
8533, # Small 'Atonement' Remote Shield Booster
|
||||
),
|
||||
16497: ( # Heavy Ghoul Compact Energy Nosferatu
|
||||
16503, # Heavy Diminishing Power System Drain I
|
||||
),
|
||||
4477: ( # Small Gremlin Compact Energy Neutralizer
|
||||
4477 : ( # Small Gremlin Compact Energy Neutralizer
|
||||
4475, # Small Unstable Power Fluctuator I
|
||||
),
|
||||
8337: ( # Mark I Compact Shield Power Relay
|
||||
8337 : ( # Mark I Compact Shield Power Relay
|
||||
8331, # Local Power Plant Manager: Reaction Shield Power Relay I
|
||||
),
|
||||
23416: ( # 'Peace' Large Remote Armor Repairer
|
||||
22951, # 'Pacifier' Large Remote Armor Repairer
|
||||
),
|
||||
5141: ( # Small Ghoul Compact Energy Nosferatu
|
||||
5141 : ( # Small Ghoul Compact Energy Nosferatu
|
||||
5139, # Small Diminishing Power System Drain I
|
||||
),
|
||||
4471: ( # Small Infectious Scoped Energy Neutralizer
|
||||
4471 : ( # Small Infectious Scoped Energy Neutralizer
|
||||
4473, # Small Rudimentary Energy Destabilizer I
|
||||
),
|
||||
16469: ( # Medium Infectious Scoped Energy Neutralizer
|
||||
16465, # Medium Rudimentary Energy Destabilizer I
|
||||
),
|
||||
8335: ( # Type-D Restrained Shield Power Relay
|
||||
8335 : ( # Type-D Restrained Shield Power Relay
|
||||
8333, # Beta Reactor Control: Shield Power Relay I
|
||||
),
|
||||
405: ( # 'Micro' Remote Shield Booster
|
||||
405 : ( # 'Micro' Remote Shield Booster
|
||||
8631, # Micro Asymmetric Remote Shield Booster
|
||||
8627, # Micro Murky Remote Shield Booster
|
||||
8629, # Micro 'Atonement' Remote Shield Booster
|
||||
8633, # Micro S95a Remote Shield Booster
|
||||
),
|
||||
8635: ( # Large Murky Compact Remote Shield Booster
|
||||
8635 : ( # Large Murky Compact Remote Shield Booster
|
||||
8637, # Large 'Atonement' Remote Shield Booster
|
||||
),
|
||||
16507: ( # Medium Knave Scoped Energy Nosferatu
|
||||
@@ -108,7 +108,7 @@ CONVERSIONS = {

 def upgrade(saveddata_engine):
     # Convert modules
-    for replacement_item, list in CONVERSIONS.iteritems():
+    for replacement_item, list in CONVERSIONS.items():
         for retired_item in list:
             saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                      (replacement_item, retired_item))
@@ -13,26 +13,26 @@ CONVERSIONS = {
|
||||
16461, # Multiphasic Bolt Array I
|
||||
16463, # 'Pandemonium' Ballistic Enhancement
|
||||
),
|
||||
5281: ( # Coadjunct Scoped Remote Sensor Booster
|
||||
5281 : ( # Coadjunct Scoped Remote Sensor Booster
|
||||
7218, # Piercing ECCM Emitter I
|
||||
),
|
||||
5365: ( # Cetus Scoped Burst Jammer
|
||||
5365 : ( # Cetus Scoped Burst Jammer
|
||||
5359, # 1Z-3 Subversive ECM Eruption
|
||||
),
|
||||
1973: ( # Sensor Booster I
|
||||
1973 : ( # Sensor Booster I
|
||||
1947, # ECCM - Radar I
|
||||
2002, # ECCM - Ladar I
|
||||
2003, # ECCM - Magnetometric I
|
||||
2004, # ECCM - Gravimetric I
|
||||
2005, # ECCM - Omni I
|
||||
),
|
||||
1951: ( # 'Basic' Tracking Enhancer
|
||||
1951 : ( # 'Basic' Tracking Enhancer
|
||||
6322, # Beta-Nought Tracking Mode
|
||||
6323, # Azimuth Descalloping Tracking Enhancer
|
||||
6324, # F-AQ Delay-Line Scan Tracking Subroutines
|
||||
6321, # Beam Parallax Tracking Program
|
||||
),
|
||||
521: ( # 'Basic' Damage Control
|
||||
521 : ( # 'Basic' Damage Control
|
||||
5829, # GLFF Containment Field
|
||||
5831, # Interior Force Field Array
|
||||
5835, # F84 Local Damage System
|
||||
@@ -42,13 +42,13 @@ CONVERSIONS = {
|
||||
22939, # 'Boss' Remote Sensor Booster
|
||||
22941, # 'Entrepreneur' Remote Sensor Booster
|
||||
),
|
||||
5443: ( # Faint Epsilon Scoped Warp Scrambler
|
||||
5443 : ( # Faint Epsilon Scoped Warp Scrambler
|
||||
5441, # Fleeting Progressive Warp Scrambler I
|
||||
),
|
||||
1963: ( # Remote Sensor Booster I
|
||||
1963 : ( # Remote Sensor Booster I
|
||||
1959, # ECCM Projector I
|
||||
),
|
||||
6325: ( # Fourier Compact Tracking Enhancer
|
||||
6325 : ( # Fourier Compact Tracking Enhancer
|
||||
6326, # Sigma-Nought Tracking Mode I
|
||||
6327, # Auto-Gain Control Tracking Enhancer I
|
||||
6328, # F-aQ Phase Code Tracking Subroutines
|
||||
@@ -68,19 +68,19 @@ CONVERSIONS = {
|
||||
22919: ( # 'Monopoly' Magnetic Field Stabilizer
|
||||
22917, # 'Capitalist' Magnetic Field Stabilizer I
|
||||
),
|
||||
5839: ( # IFFA Compact Damage Control
|
||||
5839 : ( # IFFA Compact Damage Control
|
||||
5841, # Emergency Damage Control I
|
||||
5843, # F85 Peripheral Damage System I
|
||||
5837, # Pseudoelectron Containment Field I
|
||||
),
|
||||
522: ( # 'Micro' Cap Battery
|
||||
522 : ( # 'Micro' Cap Battery
|
||||
4747, # Micro Ld-Acid Capacitor Battery I
|
||||
4751, # Micro Ohm Capacitor Reserve I
|
||||
4745, # Micro F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4749, # Micro Peroxide Capacitor Power Cell
|
||||
3480, # Micro Capacitor Battery II
|
||||
),
|
||||
518: ( # 'Basic' Gyrostabilizer
|
||||
518 : ( # 'Basic' Gyrostabilizer
|
||||
5915, # Lateral Gyrostabilizer
|
||||
5919, # F-M2 Weapon Inertial Suspensor
|
||||
5913, # Hydraulic Stabilization Actuator
|
||||
@@ -89,19 +89,19 @@ CONVERSIONS = {
|
||||
19931: ( # Compulsive Scoped Multispectral ECM
|
||||
19933, # 'Hypnos' Multispectral ECM I
|
||||
),
|
||||
5403: ( # Faint Scoped Warp Disruptor
|
||||
5403 : ( # Faint Scoped Warp Disruptor
|
||||
5401, # Fleeting Warp Disruptor I
|
||||
),
|
||||
23902: ( # 'Trebuchet' Heat Sink I
|
||||
23900, # 'Mangonel' Heat Sink I
|
||||
),
|
||||
1893: ( # 'Basic' Heat Sink
|
||||
1893 : ( # 'Basic' Heat Sink
|
||||
5845, # Heat Exhaust System
|
||||
5856, # C3S Convection Thermal Radiator
|
||||
5855, # 'Boreas' Coolant System
|
||||
5854, # Stamped Heat Sink
|
||||
),
|
||||
6160: ( # F-90 Compact Sensor Booster
|
||||
6160 : ( # F-90 Compact Sensor Booster
|
||||
20214, # Extra Radar ECCM Scanning Array I
|
||||
20220, # Extra Ladar ECCM Scanning Array I
|
||||
20226, # Extra Gravimetric ECCM Scanning Array I
|
||||
@@ -123,40 +123,40 @@ CONVERSIONS = {
|
||||
19952: ( # Umbra Scoped Radar ECM
|
||||
9520, # 'Penumbra' White Noise ECM
|
||||
),
|
||||
1952: ( # Sensor Booster II
|
||||
1952 : ( # Sensor Booster II
|
||||
2258, # ECCM - Omni II
|
||||
2259, # ECCM - Gravimetric II
|
||||
2260, # ECCM - Ladar II
|
||||
2261, # ECCM - Magnetometric II
|
||||
2262, # ECCM - Radar II
|
||||
),
|
||||
5282: ( # Linked Enduring Sensor Booster
|
||||
5282 : ( # Linked Enduring Sensor Booster
|
||||
7219, # Scattering ECCM Projector I
|
||||
),
|
||||
1986: ( # Signal Amplifier I
|
||||
1986 : ( # Signal Amplifier I
|
||||
2579, # Gravimetric Backup Array I
|
||||
2583, # Ladar Backup Array I
|
||||
2587, # Magnetometric Backup Array I
|
||||
2591, # Multi Sensor Backup Array I
|
||||
4013, # RADAR Backup Array I
|
||||
),
|
||||
4871: ( # Large Compact Pb-Acid Cap Battery
|
||||
4871 : ( # Large Compact Pb-Acid Cap Battery
|
||||
4875, # Large Ohm Capacitor Reserve I
|
||||
4869, # Large F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4873, # Large Peroxide Capacitor Power Cell
|
||||
),
|
||||
1964: ( # Remote Sensor Booster II
|
||||
1964 : ( # Remote Sensor Booster II
|
||||
1960, # ECCM Projector II
|
||||
),
|
||||
5933: ( # Counterbalanced Compact Gyrostabilizer
|
||||
5933 : ( # Counterbalanced Compact Gyrostabilizer
|
||||
5931, # Cross-Lateral Gyrostabilizer I
|
||||
5935, # F-M3 Munition Inertial Suspensor
|
||||
5929, # Pneumatic Stabilization Actuator I
|
||||
),
|
||||
4025: ( # X5 Enduring Stasis Webifier
|
||||
4025 : ( # X5 Enduring Stasis Webifier
|
||||
4029, # 'Langour' Drive Disruptor I
|
||||
),
|
||||
4027: ( # Fleeting Compact Stasis Webifier
|
||||
4027 : ( # Fleeting Compact Stasis Webifier
|
||||
4031, # Patterned Stasis Web I
|
||||
),
|
||||
22937: ( # 'Enterprise' Remote Tracking Computer
|
||||
@@ -165,7 +165,7 @@ CONVERSIONS = {
|
||||
22929: ( # 'Marketeer' Tracking Computer
|
||||
22927, # 'Economist' Tracking Computer I
|
||||
),
|
||||
1987: ( # Signal Amplifier II
|
||||
1987 : ( # Signal Amplifier II
|
||||
2580, # Gravimetric Backup Array II
|
||||
2584, # Ladar Backup Array II
|
||||
2588, # Magnetometric Backup Array II
|
||||
@@ -175,13 +175,13 @@ CONVERSIONS = {
|
||||
19939: ( # Enfeebling Scoped Ladar ECM
|
||||
9522, # Faint Phase Inversion ECM I
|
||||
),
|
||||
5340: ( # P-S Compact Remote Tracking Computer
|
||||
5340 : ( # P-S Compact Remote Tracking Computer
|
||||
5341, # 'Prayer' Remote Tracking Computer
|
||||
),
|
||||
19814: ( # Phased Scoped Target Painter
|
||||
19808, # Partial Weapon Navigation
|
||||
),
|
||||
1949: ( # 'Basic' Signal Amplifier
|
||||
1949 : ( # 'Basic' Signal Amplifier
|
||||
1946, # Basic RADAR Backup Array
|
||||
1982, # Basic Ladar Backup Array
|
||||
1983, # Basic Gravimetric Backup Array
|
||||
@@ -222,10 +222,10 @@ CONVERSIONS = {
|
||||
23416: ( # 'Peace' Large Remote Armor Repairer
|
||||
None, # 'Pacifier' Large Remote Armor Repairer
|
||||
),
|
||||
6176: ( # F-12 Enduring Tracking Computer
|
||||
6176 : ( # F-12 Enduring Tracking Computer
|
||||
6174, # Monopulse Tracking Mechanism I
|
||||
),
|
||||
6159: ( # Alumel-Wired Enduring Sensor Booster
|
||||
6159 : ( # Alumel-Wired Enduring Sensor Booster
|
||||
7917, # Alumel Radar ECCM Sensor Array I
|
||||
7918, # Alumel Ladar ECCM Sensor Array I
|
||||
7922, # Alumel Gravimetric ECCM Sensor Array I
|
||||
@@ -247,7 +247,7 @@ CONVERSIONS = {
|
||||
7914, # Prototype ECCM Magnetometric Sensor Cluster
|
||||
6158, # Prototype Sensor Booster
|
||||
),
|
||||
5849: ( # Extruded Compact Heat Sink
|
||||
5849 : ( # Extruded Compact Heat Sink
|
||||
5846, # Thermal Exhaust System I
|
||||
5858, # C4S Coiled Circuit Thermal Radiator
|
||||
5857, # 'Skadi' Coolant System I
|
||||
@@ -263,15 +263,15 @@ CONVERSIONS = {
|
||||
22945: ( # 'Executive' Remote Sensor Dampener
|
||||
22943, # 'Broker' Remote Sensor Dampener I
|
||||
),
|
||||
6173: ( # Optical Compact Tracking Computer
|
||||
6173 : ( # Optical Compact Tracking Computer
|
||||
6175, # 'Orion' Tracking CPU I
|
||||
),
|
||||
5279: ( # F-23 Compact Remote Sensor Booster
|
||||
5279 : ( # F-23 Compact Remote Sensor Booster
|
||||
7217, # Spot Pulsing ECCM I
|
||||
7220, # Phased Muon ECCM Caster I
|
||||
5280, # Connected Remote Sensor Booster
|
||||
),
|
||||
4787: ( # Small Compact Pb-Acid Cap Battery
|
||||
4787 : ( # Small Compact Pb-Acid Cap Battery
|
||||
4791, # Small Ohm Capacitor Reserve I
|
||||
4785, # Small F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
4789, # Small Peroxide Capacitor Power Cell
|
||||
@@ -279,7 +279,7 @@ CONVERSIONS = {
|
||||
19946: ( # BZ-5 Scoped Gravimetric ECM
|
||||
9519, # FZ-3 Subversive Spatial Destabilizer ECM
|
||||
),
|
||||
6073: ( # Medium Compact Pb-Acid Cap Battery
|
||||
6073 : ( # Medium Compact Pb-Acid Cap Battery
|
||||
6097, # Medium Ohm Capacitor Reserve I
|
||||
6111, # Medium F-4a Ld-Sulfate Capacitor Charge Unit
|
||||
6083, # Medium Peroxide Capacitor Power Cell
|
||||
@@ -287,7 +287,7 @@ CONVERSIONS = {
|
||||
21484: ( # 'Full Duplex' Ballistic Control System
|
||||
21482, # Ballistic 'Purge' Targeting System I
|
||||
),
|
||||
6296: ( # F-89 Compact Signal Amplifier
|
||||
6296 : ( # F-89 Compact Signal Amplifier
|
||||
6218, # Protected Gravimetric Backup Cluster I
|
||||
6222, # Protected Ladar Backup Cluster I
|
||||
6226, # Protected Magnetometric Backup Cluster I
|
||||
@@ -324,7 +324,7 @@ CONVERSIONS = {
|
||||
6293, # Wavelength Signal Enhancer I
|
||||
6295, # Type-D Attenuation Signal Augmentation
|
||||
),
|
||||
5302: ( # Phased Muon Scoped Sensor Dampener
|
||||
5302 : ( # Phased Muon Scoped Sensor Dampener
|
||||
5300, # Indirect Scanning Dampening Unit I
|
||||
),
|
||||
}
|
||||
@@ -332,7 +332,7 @@ CONVERSIONS = {
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.iteritems():
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
|
||||
@@ -4,8 +4,6 @@ Migration 15
 - Delete projected modules on citadels
 """

-import sqlalchemy
-

 def upgrade(saveddata_engine):
     sql = """

@@ -4,8 +4,6 @@ Migration 17
 - Moves all fleet boosters to the new schema
 """

-import sqlalchemy
-

 def upgrade(saveddata_engine):
     from eos.db import saveddata_session
@@ -18,23 +16,27 @@ def upgrade(saveddata_engine):
|
||||
JOIN wings w on w.ID = s.wingID
|
||||
JOIN gangs g on g.ID = w.gangID
|
||||
"""
|
||||
try:
|
||||
results = saveddata_session.execute(sql)
|
||||
|
||||
results = saveddata_session.execute(sql)
|
||||
inserts = []
|
||||
|
||||
inserts = []
|
||||
for row in results:
|
||||
boosted = row["boostedFit"]
|
||||
types = ("squad", "wing", "gang")
|
||||
for x in types:
|
||||
value = row["{}Boost".format(x)]
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
for row in results:
|
||||
boosted = row["boostedFit"]
|
||||
types = ("squad", "wing", "gang")
|
||||
for x in types:
|
||||
value = row["{}Boost".format(x)]
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
inserts.append({"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
try:
|
||||
saveddata_session.execute(commandFits_table.insert(),
|
||||
{"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
except Exception:
|
||||
pass
|
||||
saveddata_session.commit()
|
||||
inserts.append({"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
try:
|
||||
saveddata_session.execute(commandFits_table.insert(),
|
||||
{"boosterID": value, "boostedID": boosted, "active": 1})
|
||||
except Exception:
|
||||
pass
|
||||
saveddata_session.commit()
|
||||
except:
|
||||
# Shouldn't fail unless you have updated database without the old fleet schema and manually modify the database version
|
||||
# If it does, simply fail. Fleet data migration isn't critically important here
|
||||
pass
|
||||
|
||||
@@ -60,7 +60,7 @@ CONVERSIONS = {

 def upgrade(saveddata_engine):
     # Convert modules
-    for replacement_item, list in CONVERSIONS.iteritems():
+    for replacement_item, list in CONVERSIONS.items():
         for retired_item in list:
             saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                      (replacement_item, retired_item))

@@ -4,8 +4,6 @@ Migration 19
 - Deletes broken references to fits from the commandFits table (see GH issue #844)
 """

-import sqlalchemy
-

 def upgrade(saveddata_engine):
     from eos.db import saveddata_session
eos/db/migrations/upgrade21.py (new file, +10)
@@ -0,0 +1,10 @@
+"""
+Migration 21
+
+- Fixes discrepancy in drone table where we may have an amount active that is not equal to the amount in the stack
+  (we don't support activating only 2/5 drones). See GH issue #728
+"""
+
+
+def upgrade(saveddata_engine):
+    saveddata_engine.execute("UPDATE drones SET amountActive = amount where amountActive > 0 AND amountActive <> amount;")
eos/db/migrations/upgrade22.py (new file, +45)
@@ -0,0 +1,45 @@
+"""
+Migration 22
+
+- Adds the created and modified fields to most tables
+"""
+import sqlalchemy
+
+
+def upgrade(saveddata_engine):
+
+    # 1 = created only
+    # 2 = created and modified
+    tables = {
+        "boosters": 2,
+        "cargo": 2,
+        "characters": 2,
+        "crest": 1,
+        "damagePatterns": 2,
+        "drones": 2,
+        "fighters": 2,
+        "fits": 2,
+        "projectedFits": 2,
+        "commandFits": 2,
+        "implants": 2,
+        "implantSets": 2,
+        "modules": 2,
+        "overrides": 2,
+        "characterSkills": 2,
+        "targetResists": 2
+    }
+
+    for table in list(tables.keys()):
+
+        # midnight brain, there's probably a much more simple way to do this, but fuck it
+        if tables[table] > 0:
+            try:
+                saveddata_engine.execute("SELECT created FROM {0} LIMIT 1;".format(table))
+            except sqlalchemy.exc.DatabaseError:
+                saveddata_engine.execute("ALTER TABLE {} ADD COLUMN created DATETIME;".format(table))
+
+        if tables[table] > 1:
+            try:
+                saveddata_engine.execute("SELECT modified FROM {0} LIMIT 1;".format(table))
+            except sqlalchemy.exc.DatabaseError:
+                saveddata_engine.execute("ALTER TABLE {} ADD COLUMN modified DATETIME;".format(table))
eos/db/migrations/upgrade23.py (new file, +13)
@@ -0,0 +1,13 @@
+"""
+Migration 23
+
+- Adds a sec status field to the character table
+"""
+import sqlalchemy
+
+
+def upgrade(saveddata_engine):
+    try:
+        saveddata_engine.execute("SELECT secStatus FROM characters LIMIT 1")
+    except sqlalchemy.exc.DatabaseError:
+        saveddata_engine.execute("ALTER TABLE characters ADD COLUMN secStatus FLOAT;")
eos/db/migrations/upgrade24.py (new file, +14)
@@ -0,0 +1,14 @@
+"""
+Migration 24
+
+- Adds a boolean value to fit to signify if fit should ignore restrictions
+"""
+import sqlalchemy
+
+
+def upgrade(saveddata_engine):
+    try:
+        saveddata_engine.execute("SELECT ignoreRestrictions FROM fits LIMIT 1")
+    except sqlalchemy.exc.DatabaseError:
+        saveddata_engine.execute("ALTER TABLE fits ADD COLUMN ignoreRestrictions BOOLEAN")
+        saveddata_engine.execute("UPDATE fits SET ignoreRestrictions = 0")
eos/db/migrations/upgrade25.py (new file, +4246; file diff suppressed because it is too large)
eos/db/migrations/upgrade26.py (new file, +9)
@@ -0,0 +1,9 @@
+"""
+Migration 26
+
+- Deletes invalid command fit relationships caused by a bug (see #1244)
+"""
+
+
+def upgrade(saveddata_engine):
+    saveddata_engine.execute("DELETE FROM commandFits WHERE boosterID NOT IN (SELECT ID FROM fits) OR boostedID NOT IN (SELECT ID FROM fits)")
eos/db/migrations/upgrade27.py (new file, +9)
@@ -0,0 +1,9 @@
+"""
+Migration 27
+
+- Resets all alpha clones to 1 (CCP consolidated all alpha's into one skillset)
+"""
+
+
+def upgrade(saveddata_engine):
+    saveddata_engine.execute("UPDATE characters SET alphaCloneID = 1 WHERE alphaCloneID IS NOT NULL")
@@ -11,64 +11,64 @@ Migration 4
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
506: ( # 'Basic' Capacitor Power Relay
|
||||
506 : ( # 'Basic' Capacitor Power Relay
|
||||
8205, # Alpha Reactor Control: Capacitor Power Relay
|
||||
8209, # Marked Generator Refitting: Capacitor Power Relay
|
||||
8203, # Partial Power Plant Manager: Capacity Power Relay
|
||||
8207, # Type-E Power Core Modification: Capacitor Power Relay
|
||||
),
|
||||
8177: ( # Mark I Compact Capacitor Power Relay
|
||||
8177 : ( # Mark I Compact Capacitor Power Relay
|
||||
8173, # Beta Reactor Control: Capacitor Power Relay I
|
||||
),
|
||||
8175: ( # Type-D Restrained Capacitor Power Relay
|
||||
8175 : ( # Type-D Restrained Capacitor Power Relay
|
||||
8171, # Local Power Plant Manager: Capacity Power Relay I
|
||||
),
|
||||
|
||||
421: ( # 'Basic' Capacitor Recharger
|
||||
421 : ( # 'Basic' Capacitor Recharger
|
||||
4425, # AGM Capacitor Charge Array,
|
||||
4421, # F-a10 Buffer Capacitor Regenerator
|
||||
4423, # Industrial Capacitor Recharger
|
||||
4427, # Secondary Parallel Link-Capacitor
|
||||
),
|
||||
4435: ( # Eutectic Compact Cap Recharger
|
||||
4435 : ( # Eutectic Compact Cap Recharger
|
||||
4433, # Barton Reactor Capacitor Recharger I
|
||||
4431, # F-b10 Nominal Capacitor Regenerator
|
||||
4437, # Fixed Parallel Link-Capacitor I
|
||||
),
|
||||
|
||||
1315: ( # 'Basic' Expanded Cargohold
|
||||
1315 : ( # 'Basic' Expanded Cargohold
|
||||
5483, # Alpha Hull Mod Expanded Cargo
|
||||
5479, # Marked Modified SS Expanded Cargo
|
||||
5481, # Partial Hull Conversion Expanded Cargo
|
||||
5485, # Type-E Altered SS Expanded Cargo
|
||||
),
|
||||
5493: ( # Type-D Restrained Expanded Cargo
|
||||
5493 : ( # Type-D Restrained Expanded Cargo
|
||||
5491, # Beta Hull Mod Expanded Cargo
|
||||
5489, # Local Hull Conversion Expanded Cargo I
|
||||
5487, # Mark I Modified SS Expanded Cargo
|
||||
),
|
||||
|
||||
1401: ( # 'Basic' Inertial Stabilizers
|
||||
1401 : ( # 'Basic' Inertial Stabilizers
|
||||
5523, # Alpha Hull Mod Inertial Stabilizers
|
||||
5521, # Partial Hull Conversion Inertial Stabilizers
|
||||
5525, # Type-E Altered SS Inertial Stabilizers
|
||||
),
|
||||
5533: ( # Type-D Restrained Inertial Stabilizers
|
||||
5533 : ( # Type-D Restrained Inertial Stabilizers
|
||||
5531, # Beta Hull Mod Inertial Stabilizers
|
||||
5529, # Local Hull Conversion Inertial Stabilizers I
|
||||
5527, # Mark I Modified SS Inertial Stabilizers
|
||||
5519, # Marked Modified SS Inertial Stabilizers
|
||||
),
|
||||
|
||||
5239: ( # EP-S Gaussian Scoped Mining Laser
|
||||
5239 : ( # EP-S Gaussian Scoped Mining Laser
|
||||
5241, # Dual Diode Mining Laser I
|
||||
),
|
||||
5233: ( # Single Diode Basic Mining Laser
|
||||
5233 : ( # Single Diode Basic Mining Laser
|
||||
5231, # EP-R Argon Ion Basic Excavation Pulse
|
||||
5237, # Rubin Basic Particle Bore Stream
|
||||
5235, # Xenon Basic Drilling Beam
|
||||
),
|
||||
5245: ( # Particle Bore Compact Mining Laser
|
||||
5245 : ( # Particle Bore Compact Mining Laser
|
||||
5243, # XeCl Drilling Beam I
|
||||
),
|
||||
|
||||
@@ -79,53 +79,53 @@ CONVERSIONS = {
|
||||
22609, # Erin Mining Laser Upgrade
|
||||
),
|
||||
|
||||
1242: ( # 'Basic' Nanofiber Internal Structure
|
||||
1242 : ( # 'Basic' Nanofiber Internal Structure
|
||||
5591, # Alpha Hull Mod Nanofiber Structure
|
||||
5595, # Marked Modified SS Nanofiber Structure
|
||||
5559, # Partial Hull Conversion Nanofiber Structure
|
||||
5593, # Type-E Altered SS Nanofiber Structure
|
||||
),
|
||||
5599: ( # Type-D Restrained Nanofiber Structure
|
||||
5599 : ( # Type-D Restrained Nanofiber Structure
|
||||
5597, # Beta Hull Mod Nanofiber Structure
|
||||
5561, # Local Hull Conversion Nanofiber Structure I
|
||||
5601, # Mark I Modified SS Nanofiber Structure
|
||||
),
|
||||
|
||||
1192: ( # 'Basic' Overdrive Injector System
|
||||
1192 : ( # 'Basic' Overdrive Injector System
|
||||
5613, # Alpha Hull Mod Overdrive Injector
|
||||
5617, # Marked Modified SS Overdrive Injector
|
||||
5611, # Partial Hull Conversion Overdrive Injector
|
||||
5615, # Type-E Altered SS Overdrive Injector
|
||||
),
|
||||
5631: ( # Type-D Restrained Overdrive Injector
|
||||
5631 : ( # Type-D Restrained Overdrive Injector
|
||||
5629, # Beta Hull Mod Overdrive Injector
|
||||
5627, # Local Hull Conversion Overdrive Injector I
|
||||
5633, # Mark I Modified SS Overdrive Injector
|
||||
),
|
||||
|
||||
1537: ( # 'Basic' Power Diagnostic System
|
||||
1537 : ( # 'Basic' Power Diagnostic System
|
||||
8213, # Alpha Reactor Control: Diagnostic System
|
||||
8217, # Marked Generator Refitting: Diagnostic System
|
||||
8211, # Partial Power Plant Manager: Diagnostic System
|
||||
8215, # Type-E Power Core Modification: Diagnostic System
|
||||
8255, # Type-E Power Core Modification: Reaction Control
|
||||
),
|
||||
8225: ( # Mark I Compact Power Diagnostic System
|
||||
8225 : ( # Mark I Compact Power Diagnostic System
|
||||
8221, # Beta Reactor Control: Diagnostic System I
|
||||
8219, # Local Power Plant Manager: Diagnostic System I
|
||||
8223, # Type-D Power Core Modification: Diagnostic System
|
||||
),
|
||||
|
||||
1240: ( # 'Basic' Reinforced Bulkheads
|
||||
1240 : ( # 'Basic' Reinforced Bulkheads
|
||||
5677, # Alpha Hull Mod Reinforced Bulkheads
|
||||
5681, # Marked Modified SS Reinforced Bulkheads
|
||||
5675, # Partial Hull Conversion Reinforced Bulkheads
|
||||
5679, # Type-E Altered SS Reinforced Bulkheads
|
||||
),
|
||||
5649: ( # Mark I Compact Reinforced Bulkheads
|
||||
5649 : ( # Mark I Compact Reinforced Bulkheads
|
||||
5645, # Beta Hull Mod Reinforced Bulkheads
|
||||
),
|
||||
5647: ( # Type-D Restrained Reinforced Bulkheads
|
||||
5647 : ( # Type-D Restrained Reinforced Bulkheads
|
||||
5643, # Local Hull Conversion Reinforced Bulkheads I
|
||||
),
|
||||
}
|
||||
@@ -133,7 +133,7 @@ CONVERSIONS = {

 def upgrade(saveddata_engine):
     # Convert modules
-    for replacement_item, list in CONVERSIONS.iteritems():
+    for replacement_item, list in CONVERSIONS.items():
         for retired_item in list:
             saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
                                      (replacement_item, retired_item))
@@ -7,5 +7,5 @@ Overwrites damage profile 0 to reset bad uniform values (bad values set with bug

 def upgrade(saveddata_engine):
     saveddata_engine.execute('DELETE FROM damagePatterns WHERE name LIKE ? OR ID LIKE ?', ("Uniform", "1"))
-    saveddata_engine.execute('INSERT INTO damagePatterns VALUES (?, ?, ?, ?, ?, ?, ?)',
+    saveddata_engine.execute('INSERT INTO damagePatterns (ID, name, emAmount, thermalAmount, kineticAmount, explosiveAmount, ownerID) VALUES (?, ?, ?, ?, ?, ?, ?)',
                              (1, "Uniform", 25, 25, 25, 25, None))
@@ -17,7 +17,7 @@ CONVERSIONS = {

 def upgrade(saveddata_engine):
     # Convert ships
-    for replacement_item, list in CONVERSIONS.iteritems():
+    for replacement_item, list in CONVERSIONS.items():
         for retired_item in list:
             saveddata_engine.execute('UPDATE "fits" SET "shipID" = ? WHERE "shipID" = ?',
                                      (replacement_item, retired_item))
@@ -8,16 +8,16 @@ Migration 8
|
||||
"""
|
||||
|
||||
CONVERSIONS = {
|
||||
8529: ( # Large F-S9 Regolith Compact Shield Extender
|
||||
8529 : ( # Large F-S9 Regolith Compact Shield Extender
|
||||
8409, # Large Subordinate Screen Stabilizer I
|
||||
),
|
||||
8419: ( # Large Azeotropic Restrained Shield Extender
|
||||
8419 : ( # Large Azeotropic Restrained Shield Extender
|
||||
8489, # Large Supplemental Barrier Emitter I
|
||||
),
|
||||
8517: ( # Medium F-S9 Regolith Compact Shield Extender
|
||||
8517 : ( # Medium F-S9 Regolith Compact Shield Extender
|
||||
8397, # Medium Subordinate Screen Stabilizer I
|
||||
),
|
||||
8433: ( # Medium Azeotropic Restrained Shield Extender
|
||||
8433 : ( # Medium Azeotropic Restrained Shield Extender
|
||||
8477, # Medium Supplemental Barrier Emitter I
|
||||
),
|
||||
20627: ( # Small 'Trapper' Shield Extender
|
||||
@@ -28,10 +28,10 @@ CONVERSIONS = {
|
||||
8387, # Micro Subordinate Screen Stabilizer I
|
||||
8465, # Micro Supplemental Barrier Emitter I
|
||||
),
|
||||
8521: ( # Small F-S9 Regolith Compact Shield Extender
|
||||
8521 : ( # Small F-S9 Regolith Compact Shield Extender
|
||||
8401, # Small Subordinate Screen Stabilizer I
|
||||
),
|
||||
8427: ( # Small Azeotropic Restrained Shield Extender
|
||||
8427 : ( # Small Azeotropic Restrained Shield Extender
|
||||
8481, # Small Supplemental Barrier Emitter I
|
||||
),
|
||||
11343: ( # 100mm Crystalline Carbonide Restrained Plates
|
||||
@@ -77,7 +77,7 @@ CONVERSIONS = {
|
||||
|
||||
def upgrade(saveddata_engine):
|
||||
# Convert modules
|
||||
for replacement_item, list in CONVERSIONS.iteritems():
|
||||
for replacement_item, list in CONVERSIONS.items():
|
||||
for retired_item in list:
|
||||
saveddata_engine.execute('UPDATE "modules" SET "itemID" = ? WHERE "itemID" = ?',
|
||||
(replacement_item, retired_item))
|
||||
|
||||
@@ -20,6 +20,6 @@ CREATE TABLE boostersTemp (
 def upgrade(saveddata_engine):
     saveddata_engine.execute(tmpTable)
     saveddata_engine.execute(
-            "INSERT INTO boostersTemp (ID, itemID, fitID, active) SELECT ID, itemID, fitID, active FROM boosters")
+        "INSERT INTO boostersTemp (ID, itemID, fitID, active) SELECT ID, itemID, fitID, active FROM boosters")
     saveddata_engine.execute("DROP TABLE boosters")
     saveddata_engine.execute("ALTER TABLE boostersTemp RENAME TO boosters")
@@ -17,33 +17,39 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, ForeignKey, Integer, Boolean
|
||||
from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy import Table, Column, ForeignKey, Integer, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import Booster
|
||||
from eos.saveddata.booster import Booster
|
||||
from eos.saveddata.boosterSideEffect import BoosterSideEffect
|
||||
|
||||
boosters_table = Table("boosters", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("itemID", Integer),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False),
|
||||
Column("active", Boolean),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
)
|
||||
|
||||
# Legacy booster side effect code, should disable but a mapper relies on it.
|
||||
activeSideEffects_table = Table("boostersActiveSideEffects", saveddata_meta,
|
||||
Column("boosterID", ForeignKey("boosters.ID"), primary_key=True),
|
||||
Column("effectID", Integer, primary_key=True))
|
||||
|
||||
booster_side_effect_table = Table("boosterSideEffects", saveddata_meta,
|
||||
Column("boosterID", Integer, ForeignKey("boosters.ID"), primary_key=True, index=True),
|
||||
Column("effectID", Integer, nullable=False, primary_key=True),
|
||||
Column("active", Boolean, default=False)
|
||||
)
|
||||
|
||||
|
||||
class ActiveSideEffectsDummy(object):
|
||||
def __init__(self, effectID):
|
||||
self.effectID = effectID
|
||||
|
||||
|
||||
mapper(ActiveSideEffectsDummy, activeSideEffects_table)
|
||||
mapper(Booster, boosters_table,
|
||||
properties={"_Booster__activeSideEffectDummies": relation(ActiveSideEffectsDummy)})
|
||||
properties={
|
||||
"_Booster__sideEffects": relation(
|
||||
BoosterSideEffect,
|
||||
backref="booster",
|
||||
cascade='all, delete, delete-orphan'),
|
||||
}
|
||||
)
|
||||
|
||||
Booster._Booster__activeSideEffectIDs = association_proxy("_Booster__activeSideEffectDummies", "effectID")
|
||||
|
||||
mapper(BoosterSideEffect, booster_side_effect_table)
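The new created and modified columns in these saveddata tables are filled by SQLAlchemy column defaults rather than by application code: default=datetime.datetime.now stamps a row on INSERT, and onupdate=datetime.datetime.now restamps it on UPDATE. A self-contained sketch of that behaviour on a hypothetical table, written in the 1.x-style engine.execute idiom this codebase uses (that call no longer exists in SQLAlchemy 2.0):

```python
import datetime

from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table, create_engine

meta = MetaData()
things = Table("things", meta,
               Column("ID", Integer, primary_key=True),
               Column("name", String),
               Column("created", DateTime, nullable=True, default=datetime.datetime.now),
               Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))

engine = create_engine("sqlite://")
meta.create_all(engine)

# INSERT: the `created` default fires, `modified` stays NULL.
engine.execute(things.insert(), {"name": "first"})
row = engine.execute(things.select()).first()
print(row["created"], row["modified"])

# UPDATE: the `onupdate` hook stamps `modified`.
engine.execute(things.update().where(things.c.ID == row["ID"]).values(name="renamed"))
row = engine.execute(things.select()).first()
print(row["modified"])
```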
|
||||
|
||||
@@ -17,16 +17,25 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey
|
||||
from sqlalchemy.orm import mapper
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import Cargo
|
||||
from eos.saveddata.cargo import Cargo
|
||||
from eos.saveddata.fit import Fit
|
||||
|
||||
cargo_table = Table("cargo", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("fitID", Integer, ForeignKey("fits.ID"), nullable=False, index=True),
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("amount", Integer, nullable=False))
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
|
||||
)
|
||||
|
||||
mapper(Cargo, cargo_table)
|
||||
mapper(Cargo, cargo_table,
|
||||
properties={
|
||||
"owner": relation(Fit)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,13 +17,16 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime, Float
|
||||
from sqlalchemy.orm import relation, mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.db.saveddata.implant import charImplants_table
|
||||
from eos.effectHandlerHelpers import HandledImplantBoosterList
|
||||
from eos.types import Character, User, Skill, Implant
|
||||
from eos.saveddata.implant import Implant
|
||||
from eos.saveddata.user import User
|
||||
from eos.saveddata.character import Character, Skill
|
||||
|
||||
characters_table = Table("characters", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
@@ -34,27 +37,31 @@ characters_table = Table("characters", saveddata_meta,
|
||||
Column("chars", String, nullable=True),
|
||||
Column("defaultLevel", Integer, nullable=True),
|
||||
Column("alphaCloneID", Integer, nullable=True),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True))
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
Column("secStatus", Float, nullable=True, default=0.0),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now))
|
||||
|
||||
mapper(Character, characters_table,
|
||||
properties={
|
||||
"_Character__alphaCloneID": characters_table.c.alphaCloneID,
|
||||
"savedName": characters_table.c.name,
|
||||
"_Character__owner": relation(
|
||||
User,
|
||||
backref="characters"),
|
||||
"_Character__skills": relation(
|
||||
Skill,
|
||||
backref="character",
|
||||
cascade="all,delete-orphan"),
|
||||
"_Character__implants": relation(
|
||||
Implant,
|
||||
collection_class=HandledImplantBoosterList,
|
||||
cascade='all,delete-orphan',
|
||||
backref='character',
|
||||
single_parent=True,
|
||||
primaryjoin=charImplants_table.c.charID == characters_table.c.ID,
|
||||
secondaryjoin=charImplants_table.c.implantID == Implant.ID,
|
||||
secondary=charImplants_table),
|
||||
"savedName" : characters_table.c.name,
|
||||
"_Character__secStatus": characters_table.c.secStatus,
|
||||
"_Character__owner" : relation(
|
||||
User,
|
||||
backref="characters"),
|
||||
"_Character__skills" : relation(
|
||||
Skill,
|
||||
backref="character",
|
||||
cascade="all,delete-orphan"),
|
||||
"_Character__implants" : relation(
|
||||
Implant,
|
||||
collection_class=HandledImplantBoosterList,
|
||||
cascade='all,delete-orphan',
|
||||
backref='character',
|
||||
single_parent=True,
|
||||
primaryjoin=charImplants_table.c.charID == characters_table.c.ID,
|
||||
secondaryjoin=charImplants_table.c.implantID == Implant.ID,
|
||||
secondary=charImplants_table),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,15 +17,18 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, String
|
||||
from sqlalchemy import Table, Column, Integer, String, DateTime
|
||||
from sqlalchemy.orm import mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import CrestChar
|
||||
from eos.saveddata.crestchar import CrestChar
|
||||
|
||||
crest_table = Table("crest", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
Column("name", String, nullable=False, unique=True),
|
||||
Column("refresh_token", String, nullable=False))
|
||||
Column("refresh_token", String, nullable=False),
|
||||
# These records aren't updated. Instead, they are dropped and created, hence we don't have a modified field
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now))
|
||||
|
||||
mapper(CrestChar, crest_table)
|
||||
|
||||
@@ -17,11 +17,12 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, String, DateTime
|
||||
from sqlalchemy.orm import mapper
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import DamagePattern
|
||||
from eos.saveddata.damagePattern import DamagePattern
|
||||
|
||||
damagePatterns_table = Table("damagePatterns", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
@@ -30,6 +31,9 @@ damagePatterns_table = Table("damagePatterns", saveddata_meta,
|
||||
Column("thermalAmount", Integer),
|
||||
Column("kineticAmount", Integer),
|
||||
Column("explosiveAmount", Integer),
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True))
|
||||
Column("ownerID", ForeignKey("users.ID"), nullable=True),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
mapper(DamagePattern, damagePatterns_table)
|
||||
|
||||
@@ -17,13 +17,13 @@
 # along with pyfa. If not, see <http://www.gnu.org/licenses/>.
 # ===============================================================================

-import sqlalchemy
-import logging
+from sqlalchemy.exc import DatabaseError
+from logbook import Logger

-logger = logging.getLogger(__name__)
+pyfalog = Logger(__name__)


-class DatabaseCleanup:
+class DatabaseCleanup(object):
     def __init__(self):
         pass

@@ -32,8 +32,8 @@ class DatabaseCleanup:
         try:
             results = saveddata_engine.execute(query)
             return results
-        except sqlalchemy.exc.DatabaseError:
-            logger.error("Failed to connect to database or error executing query:\n%s",query)
+        except DatabaseError:
+            pyfalog.error("Failed to connect to database or error executing query:\n{0}", query)
             return None

     @staticmethod
@@ -41,7 +41,7 @@ class DatabaseCleanup:
|
||||
# Find orphaned character skills.
|
||||
# This solves an issue where the character doesn't exist, but skills for that character do.
|
||||
# See issue #917
|
||||
logger.debug("Running database cleanup for character skills.")
|
||||
pyfalog.debug("Running database cleanup for character skills.")
|
||||
query = "SELECT COUNT(*) AS num FROM characterSkills WHERE characterID NOT IN (SELECT ID from characters)"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
@@ -53,14 +53,14 @@ class DatabaseCleanup:
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM characterSkills WHERE characterID NOT IN (SELECT ID from characters)"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
logger.error("Database corruption found. Cleaning up %d records.", delete.rowcount)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitDamagePatterns(saveddata_engine):
|
||||
# Find orphaned damage patterns.
|
||||
# This solves an issue where the damage pattern doesn't exist, but fits reference the pattern.
|
||||
# See issue #777
|
||||
logger.debug("Running database cleanup for orphaned damage patterns attached to fits.")
|
||||
pyfalog.debug("Running database cleanup for orphaned damage patterns attached to fits.")
|
||||
query = "SELECT COUNT(*) AS num FROM fits WHERE damagePatternID NOT IN (SELECT ID FROM damagePatterns) OR damagePatternID IS NULL"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
@@ -80,20 +80,20 @@ class DatabaseCleanup:
|
||||
rows = uniform_results.fetchall()
|
||||
|
||||
if len(rows) == 0:
|
||||
logger.error("Missing uniform damage pattern.")
|
||||
pyfalog.error("Missing uniform damage pattern.")
|
||||
elif len(rows) > 1:
|
||||
logger.error("More than one uniform damage pattern found.")
|
||||
pyfalog.error("More than one uniform damage pattern found.")
|
||||
else:
|
||||
uniform_damage_pattern_id = rows[0]['ID']
|
||||
update_query = "UPDATE 'fits' SET 'damagePatternID' = " + str(uniform_damage_pattern_id) + \
|
||||
" WHERE damagePatternID NOT IN (SELECT ID FROM damagePatterns) OR damagePatternID IS NULL"
|
||||
update_query = "UPDATE 'fits' SET 'damagePatternID' = {} " \
|
||||
"WHERE damagePatternID NOT IN (SELECT ID FROM damagePatterns) OR damagePatternID IS NULL".format(uniform_damage_pattern_id)
|
||||
update_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, update_query)
|
||||
logger.error("Database corruption found. Cleaning up %d records.", update_results.rowcount)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", update_results.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitCharacterIDs(saveddata_engine):
|
||||
# Find orphaned character IDs. This solves an issue where the character doesn't exist, but fits reference the pattern.
|
||||
logger.debug("Running database cleanup for orphaned characters attached to fits.")
|
||||
pyfalog.debug("Running database cleanup for orphaned characters attached to fits.")
|
||||
query = "SELECT COUNT(*) AS num FROM fits WHERE characterID NOT IN (SELECT ID FROM characters) OR characterID IS NULL"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
@@ -113,22 +113,22 @@ class DatabaseCleanup:
|
||||
rows = all5_results.fetchall()
|
||||
|
||||
if len(rows) == 0:
|
||||
logger.error("Missing 'All 5' character.")
|
||||
pyfalog.error("Missing 'All 5' character.")
|
||||
elif len(rows) > 1:
|
||||
logger.error("More than one 'All 5' character found.")
|
||||
pyfalog.error("More than one 'All 5' character found.")
|
||||
else:
|
||||
all5_id = rows[0]['ID']
|
||||
update_query = "UPDATE 'fits' SET 'characterID' = " + str(all5_id) + \
|
||||
update_query = "UPDATE 'fits' SET 'characterID' = " + str(all5_id) + \
|
||||
" WHERE characterID not in (select ID from characters) OR characterID IS NULL"
|
||||
update_results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, update_query)
|
||||
logger.error("Database corruption found. Cleaning up %d records.", update_results.rowcount)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", update_results.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullDamagePatternNames(saveddata_engine):
|
||||
# Find damage patterns that are missing the name.
|
||||
# This solves an issue where the damage pattern ends up with a name that is null.
|
||||
# See issue #949
|
||||
logger.debug("Running database cleanup for missing damage pattern names.")
|
||||
pyfalog.debug("Running database cleanup for missing damage pattern names.")
|
||||
query = "SELECT COUNT(*) AS num FROM damagePatterns WHERE name IS NULL OR name = ''"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
@@ -140,14 +140,14 @@ class DatabaseCleanup:
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM damagePatterns WHERE name IS NULL OR name = ''"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
logger.error("Database corruption found. Cleaning up %d records.", delete.rowcount)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullTargetResistNames(saveddata_engine):
|
||||
# Find target resists that are missing the name.
|
||||
# This solves an issue where the target resist ends up with a name that is null.
|
||||
# See issue #949
|
||||
logger.debug("Running database cleanup for missing target resist names.")
|
||||
pyfalog.debug("Running database cleanup for missing target resist names.")
|
||||
query = "SELECT COUNT(*) AS num FROM targetResists WHERE name IS NULL OR name = ''"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
@@ -159,4 +159,81 @@ class DatabaseCleanup:
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM targetResists WHERE name IS NULL OR name = ''"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
logger.error("Database corruption found. Cleaning up %d records.", delete.rowcount)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def OrphanedFitIDItemID(saveddata_engine):
|
||||
# Orphaned items that are missing the fit ID or item ID.
|
||||
# See issue #954
|
||||
for table in ['drones', 'cargo', 'fighters']:
|
||||
pyfalog.debug("Running database cleanup for orphaned {0} items.", table)
|
||||
query = "SELECT COUNT(*) AS num FROM {} WHERE itemID IS NULL OR itemID = '' or itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM {} WHERE itemID IS NULL OR itemID = '' or itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
for table in ['modules']:
|
||||
pyfalog.debug("Running database cleanup for orphaned {0} items.", table)
|
||||
query = "SELECT COUNT(*) AS num FROM {} WHERE itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(
|
||||
table)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "DELETE FROM {} WHERE itemID = '0' or fitID IS NULL OR fitID = '' or fitID = '0'".format(table)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def NullDamageTargetPatternValues(saveddata_engine):
|
||||
# Find patterns that have null values
|
||||
# See issue #954
|
||||
for profileType in ['damagePatterns', 'targetResists']:
|
||||
for damageType in ['em', 'thermal', 'kinetic', 'explosive']:
|
||||
pyfalog.debug("Running database cleanup for null {0} values. ({1})", profileType, damageType)
|
||||
query = "SELECT COUNT(*) AS num FROM {0} WHERE {1}Amount IS NULL OR {1}Amount = ''".format(profileType,
|
||||
damageType)
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num']:
|
||||
query = "UPDATE '{0}' SET '{1}Amount' = '0' WHERE {1}Amount IS NULL OR {1}Amount = ''".format(profileType,
|
||||
damageType)
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@staticmethod
|
||||
def DuplicateSelectedAmmoName(saveddata_engine):
|
||||
# Orphaned items that are missing the fit ID or item ID.
|
||||
# See issue #954
|
||||
pyfalog.debug("Running database cleanup for duplicated selected ammo profiles.")
|
||||
query = "SELECT COUNT(*) AS num FROM damagePatterns WHERE name = 'Selected Ammo'"
|
||||
results = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
|
||||
if results is None:
|
||||
return
|
||||
|
||||
row = results.first()
|
||||
|
||||
if row and row['num'] > 1:
|
||||
query = "DELETE FROM damagePatterns WHERE name = 'Selected Ammo'"
|
||||
delete = DatabaseCleanup.ExecuteSQLQuery(saveddata_engine, query)
|
||||
pyfalog.error("Database corruption found. Cleaning up {0} records.", delete.rowcount)
|
||||
|
||||
@@ -17,11 +17,13 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
|
||||
from sqlalchemy.orm import mapper
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import Drone
|
||||
from eos.saveddata.drone import Drone
|
||||
from eos.saveddata.fit import Fit
|
||||
|
||||
drones_table = Table("drones", saveddata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
@@ -29,6 +31,13 @@ drones_table = Table("drones", saveddata_meta,
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("amountActive", Integer, nullable=False),
|
||||
Column("projected", Boolean, default=False))
|
||||
Column("projected", Boolean, default=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
mapper(Drone, drones_table)
|
||||
mapper(Drone, drones_table,
|
||||
properties={
|
||||
"owner": relation(Fit)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -17,12 +17,14 @@
|
||||
# along with eos. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ===============================================================================
|
||||
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
|
||||
from sqlalchemy.orm import mapper, relation
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.types import Fighter, Fit
|
||||
from eos.types import FighterAbility
|
||||
from eos.saveddata.fighterAbility import FighterAbility
|
||||
from eos.saveddata.fighter import Fighter
|
||||
from eos.saveddata.fit import Fit
|
||||
|
||||
fighters_table = Table("fighters", saveddata_meta,
|
||||
Column("groupID", Integer, primary_key=True),
|
||||
@@ -30,7 +32,10 @@ fighters_table = Table("fighters", saveddata_meta,
|
||||
Column("itemID", Integer, nullable=False),
|
||||
Column("active", Boolean, nullable=True),
|
||||
Column("amount", Integer, nullable=False),
|
||||
Column("projected", Boolean, default=False))
|
||||
Column("projected", Boolean, default=False),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
fighter_abilities_table = Table("fightersAbilities", saveddata_meta,
|
||||
Column("groupID", Integer, ForeignKey("fighters.groupID"), primary_key=True,
|
||||
@@ -40,11 +45,11 @@ fighter_abilities_table = Table("fightersAbilities", saveddata_meta,
|
||||
|
||||
mapper(Fighter, fighters_table,
|
||||
properties={
|
||||
"owner": relation(Fit),
|
||||
"owner" : relation(Fit),
|
||||
"_Fighter__abilities": relation(
|
||||
FighterAbility,
|
||||
backref="fighter",
|
||||
cascade='all, delete, delete-orphan'),
|
||||
FighterAbility,
|
||||
backref="fighter",
|
||||
cascade='all, delete, delete-orphan'),
|
||||
})
|
||||
|
||||
mapper(FighterAbility, fighter_abilities_table)
|
||||
|
||||
@@ -21,7 +21,8 @@ from sqlalchemy.ext.associationproxy import association_proxy
|
||||
from sqlalchemy.orm.collections import attribute_mapped_collection
|
||||
from sqlalchemy.sql import and_
|
||||
from sqlalchemy.orm import relation, reconstructor, mapper, relationship
|
||||
from sqlalchemy import ForeignKey, Column, Integer, String, Table, Boolean
|
||||
from sqlalchemy import ForeignKey, Column, Integer, String, Table, Boolean, DateTime
|
||||
import datetime
|
||||
|
||||
from eos.db import saveddata_meta
|
||||
from eos.db import saveddata_session
|
||||
@@ -32,8 +33,17 @@ from eos.db.saveddata.implant import fitImplants_table
|
||||
from eos.db.saveddata.module import modules_table
|
||||
from eos.effectHandlerHelpers import HandledModuleList, HandledImplantBoosterList, HandledProjectedModList, \
|
||||
HandledDroneCargoList, HandledProjectedDroneList
|
||||
from eos.types import Fit as es_Fit, Module, User, Booster, Drone, Fighter, Cargo, Implant, Character, DamagePattern, \
|
||||
TargetResists, ImplantLocation
|
||||
from eos.saveddata.implant import Implant
|
||||
from eos.saveddata.character import Character
|
||||
from eos.saveddata.user import User
|
||||
from eos.saveddata.fighter import Fighter
|
||||
from eos.saveddata.fit import Fit as es_Fit, ImplantLocation
|
||||
from eos.saveddata.drone import Drone
|
||||
from eos.saveddata.booster import Booster
|
||||
from eos.saveddata.module import Module
|
||||
from eos.saveddata.cargo import Cargo
|
||||
from eos.saveddata.damagePattern import DamagePattern
|
||||
from eos.saveddata.targetResists import TargetResists
|
||||
|
||||
fits_table = Table("fits", saveddata_meta,
|
||||
Column("ID", Integer, primary_key=True),
|
||||
@@ -48,6 +58,9 @@ fits_table = Table("fits", saveddata_meta,
|
||||
Column("modeID", Integer, nullable=True),
|
||||
Column("implantLocation", Integer, nullable=False, default=ImplantLocation.FIT),
|
||||
Column("notes", String, nullable=True),
|
||||
Column("ignoreRestrictions", Boolean, default=0),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, default=datetime.datetime.now, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
projectedFits_table = Table("projectedFits", saveddata_meta,
|
||||
@@ -55,12 +68,16 @@ projectedFits_table = Table("projectedFits", saveddata_meta,
|
||||
Column("victimID", ForeignKey("fits.ID"), primary_key=True),
|
||||
Column("amount", Integer, nullable=False, default=1),
|
||||
Column("active", Boolean, nullable=False, default=1),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
|
||||
|
||||
commandFits_table = Table("commandFits", saveddata_meta,
|
||||
Column("boosterID", ForeignKey("fits.ID"), primary_key=True),
|
||||
Column("boostedID", ForeignKey("fits.ID"), primary_key=True),
|
||||
Column("active", Boolean, nullable=False, default=1)
|
||||
Column("active", Boolean, nullable=False, default=1),
|
||||
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
|
||||
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
|
||||
)
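The change repeated across these tables adds created/modified DateTime columns whose values come from Python-side column defaults: default fires on INSERT, onupdate fires on UPDATE. A minimal, self-contained sketch of that behaviour, in the same classical-mapper style as the files above (the Note class and notes table are illustrative, not part of pyfa):

# --- illustrative sketch, not part of the diff ---
import datetime

from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table, create_engine
from sqlalchemy.orm import mapper, sessionmaker

metadata = MetaData()
notes_table = Table("notes", metadata,
    Column("ID", Integer, primary_key=True),
    Column("text", String, nullable=True),
    # default stamps the row once on INSERT; onupdate re-stamps it on every UPDATE
    Column("created", DateTime, nullable=True, default=datetime.datetime.now),
    Column("modified", DateTime, nullable=True, default=datetime.datetime.now, onupdate=datetime.datetime.now))


class Note(object):
    pass


mapper(Note, notes_table)

engine = create_engine("sqlite://")
metadata.create_all(engine)
session = sessionmaker(bind=engine)()

note = Note()
note.text = "first save"
session.add(note)
session.commit()            # created == modified right after the INSERT
note.text = "edited later"
session.commit()            # modified is bumped by onupdate, created stays put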
@@ -91,7 +108,7 @@ class ProjectedFit(object):

def __repr__(self):
return "ProjectedFit(sourceID={}, victimID={}, amount={}, active={}) at {}".format(
self.sourceID, self.victimID, self.amount, self.active, hex(id(self))
)

@@ -111,120 +128,131 @@ class CommandFit(object):

def __repr__(self):
return "CommandFit(boosterID={}, boostedID={}, active={}) at {}".format(
self.boosterID, self.boostedID, self.active, hex(id(self))
)

es_Fit._Fit__projectedFits = association_proxy(
"victimOf", # look at the victimOf association...
"source_fit", # .. and return the source fits
creator=lambda sourceID, source_fit: ProjectedFit(sourceID, source_fit)
)

es_Fit._Fit__commandFits = association_proxy(
"boostedOf", # look at the boostedOf association...
"booster_fit", # .. and return the booster fit
creator=lambda boosterID, booster_fit: CommandFit(boosterID, booster_fit)
)

# These relationships are broken out so that we can easily access them in the events stuff
# We sometimes don't want particular relationships to cause a fit modified update (eg: projecting
# a fit onto another would 'modify' both fits unless the following relationship is ignored)
projectedFitSourceRel = relationship(
ProjectedFit,
primaryjoin=projectedFits_table.c.sourceID == fits_table.c.ID,
backref='source_fit',
collection_class=attribute_mapped_collection('victimID'),
cascade='all, delete, delete-orphan')

boostedOntoRel = relationship(
CommandFit,
primaryjoin=commandFits_table.c.boosterID == fits_table.c.ID,
backref='booster_fit',
collection_class=attribute_mapped_collection('boostedID'),
cascade='all, delete, delete-orphan')
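The comment above is the reason projectedFitSourceRel and boostedOntoRel exist as module-level relationship objects: event code elsewhere can recognise those keys and skip them when deciding whether a fit was really modified. A hedged sketch of that idea (this is not pyfa's actual event handler; the listener, the key set and the modified attribute check are assumptions):

# --- illustrative sketch, not part of the diff ---
import datetime

from sqlalchemy import event, inspect
from sqlalchemy.orm import Session

IGNORED_KEYS = {"projectedOnto", "boostedOnto"}  # the relationships broken out above


@event.listens_for(Session, "before_flush")
def stamp_modified(session, flush_context, instances):
    for obj in session.dirty:
        state = inspect(obj)
        # Only bump "modified" when something other than the ignored
        # relationships actually changed on the object.
        changed = {
            attr.key
            for attr in state.attrs
            if attr.history.has_changes() and attr.key not in IGNORED_KEYS
        }
        if changed and hasattr(obj, "modified"):
            obj.modified = datetime.datetime.now()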
mapper(es_Fit, fits_table,
properties={
"_Fit__modules": relation(
Module,
collection_class=HandledModuleList,
primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == False), # noqa
order_by=modules_table.c.position,
cascade='all, delete, delete-orphan'),
"_Fit__projectedModules": relation(
Module,
collection_class=HandledProjectedModList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID, modules_table.c.projected == True)), # noqa
"owner": relation(
User,
backref="fits"),
"itemID": fits_table.c.shipID,
"shipID": fits_table.c.shipID,
"_Fit__boosters": relation(
Booster,
collection_class=HandledImplantBoosterList,
cascade='all, delete, delete-orphan',
single_parent=True),
Booster,
collection_class=HandledImplantBoosterList,
cascade='all, delete, delete-orphan',
backref='owner',
single_parent=True),
"_Fit__drones": relation(
Drone,
collection_class=HandledDroneCargoList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == False)), # noqa
"_Fit__fighters": relation(
Fighter,
collection_class=HandledDroneCargoList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == False)), # noqa
"_Fit__cargo": relation(
Cargo,
collection_class=HandledDroneCargoList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(cargo_table.c.fitID == fits_table.c.ID)),
"_Fit__projectedDrones": relation(
Drone,
collection_class=HandledProjectedDroneList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(drones_table.c.fitID == fits_table.c.ID, drones_table.c.projected == True)), # noqa
"_Fit__projectedFighters": relation(
Fighter,
collection_class=HandledProjectedDroneList,
cascade='all, delete, delete-orphan',
single_parent=True,
primaryjoin=and_(fighters_table.c.fitID == fits_table.c.ID, fighters_table.c.projected == True)), # noqa
"_Fit__implants": relation(
Implant,
collection_class=HandledImplantBoosterList,
cascade='all, delete, delete-orphan',
backref='fit',
single_parent=True,
primaryjoin=fitImplants_table.c.fitID == fits_table.c.ID,
secondaryjoin=fitImplants_table.c.implantID == Implant.ID,
secondary=fitImplants_table),
Implant,
collection_class=HandledImplantBoosterList,
cascade='all, delete, delete-orphan',
backref='owner',
single_parent=True,
primaryjoin=fitImplants_table.c.fitID == fits_table.c.ID,
secondaryjoin=fitImplants_table.c.implantID == Implant.ID,
secondary=fitImplants_table),
"_Fit__character": relation(
Character,
backref="fits"),
"_Fit__damagePattern": relation(DamagePattern),
"_Fit__targetResists": relation(TargetResists),
"projectedOnto": relationship(
ProjectedFit,
primaryjoin=projectedFits_table.c.sourceID == fits_table.c.ID,
backref='source_fit',
collection_class=attribute_mapped_collection('victimID'),
cascade='all, delete, delete-orphan'),
"projectedOnto": projectedFitSourceRel,
"victimOf": relationship(
ProjectedFit,
primaryjoin=fits_table.c.ID == projectedFits_table.c.victimID,
backref='victim_fit',
collection_class=attribute_mapped_collection('sourceID'),
cascade='all, delete, delete-orphan'),
"boostedOnto": relationship(
CommandFit,
primaryjoin=commandFits_table.c.boosterID == fits_table.c.ID,
backref='booster_fit',
collection_class=attribute_mapped_collection('boostedID'),
cascade='all, delete, delete-orphan'),
"boostedOnto": boostedOntoRel,
"boostedOf": relationship(
CommandFit,
primaryjoin=fits_table.c.ID == commandFits_table.c.boostedID,
backref='boosted_fit',
collection_class=attribute_mapped_collection('boosterID'),
cascade='all, delete, delete-orphan'),
}
)

mapper(ProjectedFit, projectedFits_table,
properties={
"_ProjectedFit__amount": projectedFits_table.c.amount,
}
)

mapper(CommandFit, commandFits_table)
@@ -17,16 +17,20 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean
from sqlalchemy import Table, Column, Integer, ForeignKey, Boolean, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.types import Implant
from eos.saveddata.implant import Implant

implants_table = Table("implants", saveddata_meta,
Column("ID", Integer, primary_key=True),
Column("itemID", Integer),
Column("active", Boolean))
Column("active", Boolean),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

fitImplants_table = Table("fitImplants", saveddata_meta,
Column("fitID", ForeignKey("fits.ID"), index=True),

@@ -17,29 +17,33 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, String
from sqlalchemy import Table, Column, Integer, String, DateTime
from sqlalchemy.orm import relation, mapper
import datetime

from eos.db import saveddata_meta
from eos.db.saveddata.implant import implantsSetMap_table
from eos.effectHandlerHelpers import HandledImplantBoosterList
from eos.types import Implant, ImplantSet
from eos.saveddata.implant import Implant
from eos.saveddata.implantSet import ImplantSet

implant_set_table = Table("implantSets", saveddata_meta,
Column("ID", Integer, primary_key=True),
Column("name", String, nullable=False),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

mapper(ImplantSet, implant_set_table,
properties={
"_ImplantSet__implants": relation(
Implant,
collection_class=HandledImplantBoosterList,
cascade='all, delete, delete-orphan',
backref='set',
single_parent=True,
primaryjoin=implantsSetMap_table.c.setID == implant_set_table.c.ID,
secondaryjoin=implantsSetMap_table.c.implantID == Implant.ID,
secondary=implantsSetMap_table),
}
)

@@ -26,7 +26,7 @@ class ImportError(Exception):
pass


class DefaultDatabaseValues():
class DefaultDatabaseValues(object):
def __init__(self):
pass

@@ -68,7 +68,7 @@ class DefaultDatabaseValues():
["[Hybrid Charges]Uranium", "0", "38.4", "57.6", "0"],
["[Missiles]Mjolnir", "100", "0", "0", "0"], ["[Missiles]Inferno", "0", "100", "0", "0"],
["[Missiles]Scourge", "0", "0", "100", "0"], ["[Missiles]Nova", "0", "0", "0", "100"],
["[Missiles][Structure) Standup Missile", "100", "100", "100", "100"],
["[Missiles][Structure] Standup Missile", "100", "100", "100", "100"],
["[Projectile Ammo][T2] Tremor", "0", "0", "24", "40"],
["[Projectile Ammo][T2] Quake", "0", "0", "40", "72"],
["[Projectile Ammo][T2] Hail", "0", "0", "26.4", "96.8"],

@@ -21,7 +21,7 @@ from sqlalchemy import Column, Table, String
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.types import MiscData
from eos.saveddata.miscData import MiscData

miscdata_table = Table("miscdata", saveddata_meta,
Column("fieldName", String, primary_key=True),

@@ -17,11 +17,13 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, ForeignKey, CheckConstraint, Boolean
from sqlalchemy import Table, Column, Integer, ForeignKey, CheckConstraint, Boolean, DateTime
from sqlalchemy.orm import relation, mapper
import datetime

from eos.db import saveddata_meta
from eos.types import Module, Fit
from eos.saveddata.module import Module
from eos.saveddata.fit import Fit

modules_table = Table("modules", saveddata_meta,
Column("ID", Integer, primary_key=True),
@@ -32,6 +34,8 @@ modules_table = Table("modules", saveddata_meta,
Column("state", Integer, CheckConstraint("state >= -1"), CheckConstraint("state <= 2")),
Column("projected", Boolean, default=False, nullable=False),
Column("position", Integer),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now),
CheckConstraint('("dummySlot" = NULL OR "itemID" = NULL) AND "dummySlot" != "itemID"'))

mapper(Module, modules_table,

@@ -17,15 +17,19 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float
from sqlalchemy import Table, Column, Integer, Float, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.types import Override
from eos.saveddata.override import Override

overrides_table = Table("overrides", saveddata_meta,
Column("itemID", Integer, primary_key=True, index=True),
Column("attrID", Integer, primary_key=True, index=True),
Column("value", Float, nullable=False))
Column("value", Float, nullable=False),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

mapper(Override, overrides_table)

@@ -21,12 +21,14 @@ from sqlalchemy import Table, Column, Float, Integer
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.types import Price
from eos.saveddata.price import Price

prices_table = Table("prices", saveddata_meta,
Column("typeID", Integer, primary_key=True),
Column("price", Float),
Column("price", Float, default=0.0),
Column("time", Integer, nullable=False),
Column("failed", Integer))

mapper(Price, prices_table)
mapper(Price, prices_table, properties={
"_Price__price": prices_table.c.price,
})

@@ -17,16 +17,26 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

import sys

from sqlalchemy.sql import and_
from sqlalchemy import desc, select

from eos.db import saveddata_session, sd_lock
from eos.db.saveddata.fit import projectedFits_table
from eos.db.util import processEager, processWhere
from eos.db import saveddata_session, sd_lock
from eos.saveddata.price import Price
from eos.saveddata.user import User
from eos.saveddata.crestchar import CrestChar
from eos.saveddata.damagePattern import DamagePattern
from eos.saveddata.targetResists import TargetResists
from eos.saveddata.character import Character
from eos.saveddata.implantSet import ImplantSet
from eos.saveddata.fit import Fit
from eos.saveddata.module import Module
from eos.saveddata.miscData import MiscData
from eos.saveddata.override import Override

from eos.types import *
from eos.db.saveddata.fit import projectedFits_table
from sqlalchemy.sql import and_
import eos.config

configVal = getattr(eos.config, "saveddataCache", None)
@@ -62,6 +72,7 @@ if configVal is True:
def checkAndReturn(*args, **kwargs):
useCache = kwargs.pop("useCache", True)
cacheKey = []
items = None
cacheKey.extend(args)
for keyword in keywords:
cacheKey.append(kwargs.get(keyword))
@@ -98,9 +109,9 @@ if configVal is True:
if type not in queryCache:
return
functionCache = queryCache[type]
for _, localCache in functionCache.iteritems():
for _, localCache in functionCache.items():
toDelete = set()
for cacheKey, info in localCache.iteritems():
for cacheKey, info in localCache.items():
IDs = info[1]
if ID in IDs:
toDelete.add(cacheKey)
@@ -145,7 +156,7 @@ def getUser(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
user = saveddata_session.query(User).options(*eager).filter(User.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
user = saveddata_session.query(User).options(*eager).filter(User.username == lookfor).first()
@@ -164,11 +175,11 @@ def getCharacter(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
character = saveddata_session.query(Character).options(*eager).filter(Character.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
character = saveddata_session.query(Character).options(*eager).filter(
Character.savedName == lookfor).first()
else:
raise TypeError("Need integer or string as argument")
return character
@@ -234,22 +245,34 @@ def getFitsWithShip(shipID, ownerID=None, where=None, eager=None):
return fits


def getBoosterFits(ownerID=None, where=None, eager=None):
"""
Get all the fits that are flagged as a boosting ship
If no user is passed, do this for all users.
"""

if ownerID is not None and not isinstance(ownerID, int):
raise TypeError("OwnerID must be integer")
filter = Fit.booster == 1
if ownerID is not None:
filter = and_(filter, Fit.ownerID == ownerID)

filter = processWhere(filter, where)
def getRecentFits(ownerID=None, where=None, eager=None):
eager = processEager(eager)
with sd_lock:
fits = removeInvalid(saveddata_session.query(Fit).options(*eager).filter(filter).all())
q = select((
Fit.ID,
Fit.shipID,
Fit.name,
Fit.modified,
Fit.created,
Fit.timestamp,
Fit.notes
)).order_by(desc(Fit.modified), desc(Fit.timestamp)).limit(50)
fits = eos.db.saveddata_session.execute(q).fetchall()

return fits
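getRecentFits() replaces the old getBoosterFits() helper and goes through Core select() rather than the ORM, so it can return a handful of lightweight columns ordered by the new modified timestamp. A self-contained sketch of the same query shape against a throwaway table (the 1.x positional select((cols)) spelling is kept to match the code above; table and data are made up):

# --- illustrative sketch, not part of the diff ---
import datetime

from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table, create_engine, desc, select

metadata = MetaData()
fits = Table("fits", metadata,
    Column("ID", Integer, primary_key=True),
    Column("name", String),
    Column("modified", DateTime))

engine = create_engine("sqlite://")
metadata.create_all(engine)

with engine.connect() as conn:
    conn.execute(fits.insert(), [
        {"name": "old fit", "modified": datetime.datetime(2016, 1, 1)},
        {"name": "new fit", "modified": datetime.datetime(2017, 1, 1)},
    ])
    # newest fits first, capped at 50 rows, plain tuples instead of ORM objects
    q = select((fits.c.ID, fits.c.name, fits.c.modified)).order_by(desc(fits.c.modified)).limit(50)
    print(conn.execute(q).fetchall())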

def getFitsWithModules(typeIDs, eager=None):
"""
Get all the fits that have typeIDs fitted to them
"""

if not hasattr(typeIDs, "__iter__"):
typeIDs = (typeIDs,)

eager = processEager(eager)
with sd_lock:
fits = removeInvalid(saveddata_session.query(Fit).join(Module).options(*eager).filter(Module.itemID.in_(typeIDs)).all())

return fits

@@ -314,7 +337,7 @@ def clearPrices():

def getMiscData(field):
if isinstance(field, basestring):
if isinstance(field, str):
with sd_lock:
data = saveddata_session.query(MiscData).get(field)
else:
@@ -329,6 +352,13 @@ def getDamagePatternList(eager=None):
return patterns


def clearDamagePatterns():
with sd_lock:
deleted_rows = saveddata_session.query(DamagePattern).filter(DamagePattern.name != 'Uniform').delete()
commit()
return deleted_rows


def getTargetResistsList(eager=None):
eager = processEager(eager)
with sd_lock:
@@ -336,6 +366,13 @@ def getTargetResistsList(eager=None):
return patterns


def clearTargetResists():
with sd_lock:
deleted_rows = saveddata_session.query(TargetResists).delete()
commit()
return deleted_rows


def getImplantSetList(eager=None):
eager = processEager(eager)
with sd_lock:
@@ -353,12 +390,12 @@ def getDamagePattern(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(DamagePattern).options(*eager).filter(
DamagePattern.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(DamagePattern).options(*eager).filter(
DamagePattern.name == lookfor).first()
else:
raise TypeError("Need integer or string as argument")
return pattern
@@ -374,12 +411,12 @@ def getTargetResists(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(TargetResists).options(*eager).filter(
TargetResists.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(TargetResists).options(*eager).filter(
TargetResists.name == lookfor).first()
else:
raise TypeError("Need integer or string as argument")
return pattern
@@ -395,8 +432,8 @@ def getImplantSet(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(ImplantSet).options(*eager).filter(
TargetResists.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
pattern = saveddata_session.query(ImplantSet).options(*eager).filter(TargetResists.name == lookfor).first()
@@ -406,10 +443,10 @@ def getImplantSet(lookfor, eager=None):


def searchFits(nameLike, where=None, eager=None):
if not isinstance(nameLike, basestring):
if not isinstance(nameLike, str):
raise TypeError("Need string as argument")
# Prepare our string for request
nameLike = u"%{0}%".format(sqlizeString(nameLike))
nameLike = "%{0}%".format(sqlizeString(nameLike))

# Add any extra components to the search to our where clause
filter = processWhere(Fit.name.like(nameLike, escape="\\"), where)
@@ -447,7 +484,7 @@ def getCrestCharacter(lookfor, eager=None):
eager = processEager(eager)
with sd_lock:
character = saveddata_session.query(CrestChar).options(*eager).filter(CrestChar.ID == lookfor).first()
elif isinstance(lookfor, basestring):
elif isinstance(lookfor, str):
eager = processEager(eager)
with sd_lock:
character = saveddata_session.query(CrestChar).options(*eager).filter(CrestChar.name == lookfor).first()
@@ -478,8 +515,8 @@ def removeInvalid(fits):
invalids = [f for f in fits if f.isInvalid]

if invalids:
map(fits.remove, invalids)
map(saveddata_session.delete, invalids)
list(map(fits.remove, invalids))
list(map(saveddata_session.delete, invalids))
saveddata_session.commit()

return fits
@@ -504,5 +541,10 @@ def remove(stuff):

def commit():
with sd_lock:
saveddata_session.commit()
saveddata_session.flush()
try:
saveddata_session.commit()
saveddata_session.flush()
except Exception:
saveddata_session.rollback()
exc_info = sys.exc_info()
raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
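The new commit() wraps the commit/flush in a try/except so that a failed flush rolls the session back before the error is re-raised (the sys.exc_info() spelling keeps the original traceback across Python 2 and 3). The same pattern in isolation, assuming any SQLAlchemy session and lock object:

# --- illustrative sketch, not part of the diff ---
def safe_commit(session, lock):
    with lock:
        try:
            session.commit()
            session.flush()
        except Exception:
            session.rollback()   # leave the session usable for the caller
            raise                # a bare raise also preserves the original traceback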
@@ -17,15 +17,20 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, ForeignKey
from sqlalchemy import Table, Column, Integer, ForeignKey, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.types import Skill
from eos.saveddata.character import Skill


skills_table = Table("characterSkills", saveddata_meta,
Column("characterID", ForeignKey("characters.ID"), primary_key=True, index=True),
Column("itemID", Integer, primary_key=True),
Column("_Skill__level", Integer, nullable=True))
Column("_Skill__level", Integer, nullable=True),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

mapper(Skill, skills_table)

@@ -17,11 +17,12 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String
from sqlalchemy import Table, Column, Integer, Float, ForeignKey, String, DateTime
from sqlalchemy.orm import mapper
import datetime

from eos.db import saveddata_meta
from eos.types import TargetResists
from eos.saveddata.targetResists import TargetResists

targetResists_table = Table("targetResists", saveddata_meta,
Column("ID", Integer, primary_key=True),
@@ -30,6 +31,9 @@ targetResists_table = Table("targetResists", saveddata_meta,
Column("thermalAmount", Float),
Column("kineticAmount", Float),
Column("explosiveAmount", Float),
Column("ownerID", ForeignKey("users.ID"), nullable=True))
Column("ownerID", ForeignKey("users.ID"), nullable=True),
Column("created", DateTime, nullable=True, default=datetime.datetime.now),
Column("modified", DateTime, nullable=True, onupdate=datetime.datetime.now)
)

mapper(TargetResists, targetResists_table)

@@ -21,7 +21,7 @@ from sqlalchemy import Table, Column, Integer, String, Boolean
from sqlalchemy.orm import mapper

from eos.db import saveddata_meta
from eos.types import User
from eos.saveddata.user import User

users_table = Table("users", saveddata_meta,
Column("ID", Integer, primary_key=True),

@@ -20,16 +20,18 @@
from sqlalchemy.orm import eagerload
from sqlalchemy.sql import and_

replace = {"attributes": "_Item__attributes",
"modules": "_Fit__modules",
"projectedModules": "_Fit__projectedModules",
"boosters": "_Fit__boosters",
"drones": "_Fit__drones",
"projectedDrones": "_Fit__projectedDrones",
"implants": "_Fit__implants",
"character": "_Fit__character",
"damagePattern": "_Fit__damagePattern",
"projectedFits": "_Fit__projectedFits"}
replace = {
"attributes" : "_Item__attributes",
"modules" : "_Fit__modules",
"projectedModules": "_Fit__projectedModules",
"boosters" : "_Fit__boosters",
"drones" : "_Fit__drones",
"projectedDrones" : "_Fit__projectedDrones",
"implants" : "_Fit__implants",
"character" : "_Fit__character",
"damagePattern" : "_Fit__damagePattern",
"projectedFits" : "_Fit__projectedFits"
}


def processEager(eager):
@@ -37,7 +39,7 @@ def processEager(eager):
return tuple()
else:
l = []
if isinstance(eager, basestring):
if isinstance(eager, str):
eager = (eager,)

for e in eager:
@@ -48,7 +50,7 @@ def processEager(eager):

def _replacements(eagerString):
splitEager = eagerString.split(".")
for i in xrange(len(splitEager)):
for i in range(len(splitEager)):
part = splitEager[i]
replacement = replace.get(part)
if replacement:
@@ -17,12 +17,9 @@
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

# from sqlalchemy.orm.attributes import flag_modified
import logging
from logbook import Logger

import eos.db

logger = logging.getLogger(__name__)
pyfalog = Logger(__name__)


class HandledList(list):
@@ -118,7 +115,7 @@ class HandledList(list):
class HandledModuleList(HandledList):
def append(self, mod):
emptyPosition = float("Inf")
for i in xrange(len(self)):
for i in range(len(self)):
currMod = self[i]
if currMod.isEmpty and not mod.isEmpty and currMod.slot == mod.slot:
currPos = mod.position or i
@@ -139,10 +136,6 @@ class HandledModuleList(HandledList):
self.remove(mod)
return

# fix for #529, where a module may be in incorrect state after CCP changes mechanics of module
if not mod.isValidState(mod.state):
mod.state = eos.types.State.ONLINE

def insert(self, index, mod):
mod.position = index
i = index
@@ -156,13 +149,13 @@ class HandledModuleList(HandledList):
oldPos = mod.position

mod.position = None
for i in xrange(oldPos, len(self)):
for i in range(oldPos, len(self)):
self[i].position -= 1

def toDummy(self, index):
mod = self[index]
if not mod.isEmpty:
dummy = eos.types.Module.buildEmpty(mod.slot)
dummy = mod.buildEmpty(mod.slot)
dummy.position = index
self[index] = dummy

@@ -205,7 +198,7 @@ class HandledImplantBoosterList(HandledList):
# if needed, remove booster that was occupying slot
oldObj = next((m for m in self if m.slot == thing.slot), None)
if oldObj:
logging.info("Slot %d occupied with %s, replacing with %s", thing.slot, oldObj.item.name, thing.item.name)
pyfalog.info("Slot {0} occupied with {1}, replacing with {2}", thing.slot, oldObj.item.name, thing.item.name)
oldObj.itemID = 0 # hack to remove from DB. See GH issue #324
self.remove(oldObj)

@@ -229,7 +222,7 @@ class HandledProjectedModList(HandledList):
oldEffect = next((m for m in self if m.item.group.name == "Effect Beacon"), None)

if oldEffect:
logging.info("System effect occupied with %s, replacing with %s", oldEffect.item.name, proj.item.name)
pyfalog.info("System effect occupied with {0}, replacing with {1}", oldEffect.item.name, proj.item.name)
self.remove(oldEffect)

HandledList.append(self, proj)
@@ -2,6 +2,7 @@
#
# Used by:
# Modules named like: Memetic Algorithm Bank (8 of 8)
# Implant: Neural Lace 'Blackglass' Net Intrusion 920-40
# Implant: Poteque 'Prospector' Environmental Analysis EY-1005
# Implant: Poteque 'Prospector' Hacking HC-905
type = "passive"

@@ -2,9 +2,10 @@
#
# Used by:
# Module: Reactive Armor Hardener
import logging
from logbook import Logger
import eos.config

logger = logging.getLogger(__name__)
pyfalog = Logger(__name__)

runTime = "late"
type = "active"
@@ -13,10 +14,14 @@ type = "active"
def handler(fit, module, context):
damagePattern = fit.damagePattern

static_adaptive_behavior = eos.config.settings['useStaticAdaptiveArmorHardener']

if (damagePattern.emAmount == damagePattern.thermalAmount == damagePattern.kineticAmount == damagePattern.explosiveAmount) and static_adaptive_behavior:
pyfalog.debug("Setting adaptivearmorhardener resists to uniform profile.")
return

# Skip if there is no damage pattern. Example: projected ships or fleet boosters
if damagePattern:
# logger.debug("Damage Pattern: %f/%f/%f/%f", damagePattern.emAmount, damagePattern.thermalAmount, damagePattern.kineticAmount, damagePattern.explosiveAmount)
# logger.debug("Original Armor Resists: %f/%f/%f/%f", fit.ship.getModifiedItemAttr('armorEmDamageResonance'), fit.ship.getModifiedItemAttr('armorThermalDamageResonance'), fit.ship.getModifiedItemAttr('armorKineticDamageResonance'), fit.ship.getModifiedItemAttr('armorExplosiveDamageResonance'))

# Populate a tuple with the damage profile modified by current armor resists.
baseDamageTaken = (
@@ -25,7 +30,7 @@ def handler(fit, module, context):
damagePattern.kineticAmount * fit.ship.getModifiedItemAttr('armorKineticDamageResonance'),
damagePattern.explosiveAmount * fit.ship.getModifiedItemAttr('armorExplosiveDamageResonance'),
)
# logger.debug("Damage Adjusted for Armor Resists: %f/%f/%f/%f", baseDamageTaken[0], baseDamageTaken[1], baseDamageTaken[2], baseDamageTaken[3])
# pyfalog.debug("Damage Adjusted for Armor Resists: %f/%f/%f/%f", baseDamageTaken[0], baseDamageTaken[1], baseDamageTaken[2], baseDamageTaken[3])

resistanceShiftAmount = module.getModifiedItemAttr(
'resistanceShiftAmount') / 100 # The attribute is in percent and we want a fraction
@@ -41,7 +46,7 @@ def handler(fit, module, context):
cycleList = []
loopStart = -20
for num in range(50):
# logger.debug("Starting cycle %d.", num)
# pyfalog.debug("Starting cycle %d.", num)
# The strange order is to emulate the ingame sorting when different types have taken the same amount of damage.
# This doesn't take into account stacking penalties. In a few cases fitting a Damage Control causes an inaccurate result.
damagePattern_tuples = [
@@ -50,7 +55,6 @@ def handler(fit, module, context):
(2, baseDamageTaken[2] * RAHResistance[2], RAHResistance[2]),
(1, baseDamageTaken[1] * RAHResistance[1], RAHResistance[1]),
]
# logger.debug("Damage taken this cycle: %f/%f/%f/%f", damagePattern_tuples[0][1], damagePattern_tuples[3][1], damagePattern_tuples[2][1], damagePattern_tuples[1][1])

# Sort the tuple to drop the highest damage value to the bottom
sortedDamagePattern_tuples = sorted(damagePattern_tuples, key=lambda damagePattern: damagePattern[1])
@@ -80,7 +84,7 @@ def handler(fit, module, context):
RAHResistance[sortedDamagePattern_tuples[1][0]] = sortedDamagePattern_tuples[1][2] + change1
RAHResistance[sortedDamagePattern_tuples[2][0]] = sortedDamagePattern_tuples[2][2] + change2
RAHResistance[sortedDamagePattern_tuples[3][0]] = sortedDamagePattern_tuples[3][2] + change3
# logger.debug("Resistances shifted to %f/%f/%f/%f", RAHResistance[0], RAHResistance[1], RAHResistance[2], RAHResistance[3])
# pyfalog.debug("Resistances shifted to %f/%f/%f/%f", RAHResistance[0], RAHResistance[1], RAHResistance[2], RAHResistance[3])

# See if the current RAH profile has been encountered before, indicating a loop.
for i, val in enumerate(cycleList):
@@ -90,7 +94,7 @@ def handler(fit, module, context):
abs(RAHResistance[2] - val[2]) <= tolerance and \
abs(RAHResistance[3] - val[3]) <= tolerance:
loopStart = i
# logger.debug("Loop found: %d-%d", loopStart, num)
# pyfalog.debug("Loop found: %d-%d", loopStart, num)
break
if loopStart >= 0:
break
@@ -98,7 +102,7 @@ def handler(fit, module, context):
cycleList.append(list(RAHResistance))

if loopStart < 0:
logger.error("Reactive Armor Hardener failed to find equilibrium. Damage profile after armor: %f/%f/%f/%f",
pyfalog.error("Reactive Armor Hardener failed to find equilibrium. Damage profile after armor: {0}/{1}/{2}/{3}",
baseDamageTaken[0], baseDamageTaken[1], baseDamageTaken[2], baseDamageTaken[3])

# Average the profiles in the RAH loop, or the last 20 if it didn't find a loop.
@@ -113,7 +117,7 @@ def handler(fit, module, context):
average[i] = round(average[i] / numCycles, 3)

# Set the new resistances
# logger.debug("Setting new resist profile: %f/%f/%f/%f", average[0], average[1], average[2],average[3])
# pyfalog.debug("Setting new resist profile: %f/%f/%f/%f", average[0], average[1], average[2],average[3])
for i, attr in enumerate((
'armorEmDamageResonance', 'armorThermalDamageResonance', 'armorKineticDamageResonance',
'armorExplosiveDamageResonance')):
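The handler above simulates Reactive Armor Hardener cycles until the resist profile repeats within a tolerance, then averages the repeating window (or the last 20 cycles if no loop is found). The loop-detection step on its own, as a small standalone sketch with made-up numbers and tolerance:

# --- illustrative sketch, not part of the diff ---
def find_loop(cycles, candidate, tolerance=0.01):
    # Return the index of the earlier cycle this candidate profile matches, or -1.
    for i, previous in enumerate(cycles):
        if all(abs(c - p) <= tolerance for c, p in zip(candidate, previous)):
            return i  # the repeating loop starts at cycle i
    return -1


history = [[0.85, 0.85, 0.55, 0.75], [0.79, 0.91, 0.55, 0.75]]
print(find_loop(history, [0.792, 0.908, 0.55, 0.75]))  # -> 1 (matches within tolerance)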
eos/effects/agilitybonus.py (new file)
@@ -0,0 +1,9 @@
# agilityBonus
#
# Used by:
# Subsystems named like: Propulsion Interdiction Nullifier (4 of 4)
type = "passive"


def handler(fit, src, context):
fit.ship.increaseItemAttr("agility", src.getModifiedItemAttr("agilityBonusAdd"))
@@ -1,9 +1,7 @@
# ammoInfluenceCapNeed
#
# Used by:
# Items from category: Charge (465 of 899)
# Charges from group: Frequency Crystal (185 of 185)
# Charges from group: Hybrid Charge (209 of 209)
# Items from category: Charge (478 of 925)
type = "passive"


@@ -1,7 +1,7 @@
# ammoInfluenceRange
#
# Used by:
# Items from category: Charge (571 of 899)
# Items from category: Charge (572 of 925)
type = "passive"


@@ -1,9 +1,8 @@
# ammoSpeedMultiplier
#
# Used by:
# Charges from group: Festival Charges (9 of 9)
# Charges from group: Festival Charges (23 of 23)
# Charges from group: Interdiction Probe (2 of 2)
# Charges from group: Survey Probe (3 of 3)
type = "passive"


@@ -7,7 +7,7 @@
# Charges from group: Advanced Blaster Charge (8 of 8)
# Charges from group: Advanced Pulse Laser Crystal (8 of 8)
# Charges from group: Advanced Railgun Charge (8 of 8)
# Charges from group: Projectile Ammo (129 of 129)
# Charges from group: Projectile Ammo (128 of 128)
type = "passive"


@@ -6,4 +6,4 @@ type = "passive"


def handler(fit, module, context):
fit.ship.increaseItemAttr("warmScrambleStatus", module.getModifiedItemAttr("warpScrambleStrength"))
fit.ship.increaseItemAttr("warpScrambleStatus", module.getModifiedItemAttr("warpScrambleStrength"))

@@ -1,9 +1,9 @@
# armorHPBonusAddPassive
#
# Used by:
# Subsystems from group: Defensive Systems (16 of 16)
# Subsystems from group: Defensive Systems (9 of 12)
type = "passive"


def handler(fit, module, context):
fit.ship.increaseItemAttr("armorHP", module.getModifiedItemAttr("armorHPBonusAdd"))
fit.ship.increaseItemAttr("armorHP", module.getModifiedItemAttr("armorHPBonusAdd") or 0)

@@ -3,7 +3,6 @@
# Used by:
# Modules from group: Armor Coating (202 of 202)
# Modules from group: Armor Plating Energized (187 of 187)
# Modules named like: QA Multiship Module Players (4 of 4)
type = "passive"


@@ -1,7 +1,7 @@
# armorRepairAmountBonusSubcap
#
# Used by:
# Implants named like: Grade Asklepian (15 of 16)
# Implants named like: grade Asklepian (15 of 18)
type = "passive"


@@ -1,10 +1,15 @@
# boosterArmorHpPenalty
#
# Used by:
# Implants from group: Booster (12 of 48)
# Implants from group: Booster (12 of 54)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Armor Capacity"

# Attribute that this effect targets
attr = "boosterArmorHPPenalty"


def handler(fit, booster, context):
fit.ship.boostItemAttr("armorHP", booster.getModifiedItemAttr("boosterArmorHPPenalty"))
fit.ship.boostItemAttr("armorHP", booster.getModifiedItemAttr(attr))

@@ -5,9 +5,14 @@
# Implants named like: Mindflood Booster (3 of 4)
# Implants named like: Sooth Sayer Booster (3 of 4)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Armor Repair Amount"

# Attribute that this effect targets
attr = "boosterArmorRepairAmountPenalty"


def handler(fit, booster, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.group.name == "Armor Repair Unit",
"armorDamageAmount", booster.getModifiedItemAttr("boosterArmorRepairAmountPenalty"))
"armorDamageAmount", booster.getModifiedItemAttr(attr))

@@ -4,8 +4,13 @@
# Implants named like: Blue Pill Booster (3 of 5)
# Implants named like: Exile Booster (3 of 4)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Cap Capacity"

# Attribute that this effect targets
attr = "boosterCapacitorCapacityPenalty"


def handler(fit, booster, context):
fit.ship.boostItemAttr("capacitorCapacity", booster.getModifiedItemAttr("boosterCapacitorCapacityPenalty"))
fit.ship.boostItemAttr("capacitorCapacity", booster.getModifiedItemAttr(attr))

@@ -1,10 +1,15 @@
# boosterMaxVelocityPenalty
#
# Used by:
# Implants from group: Booster (12 of 48)
# Implants from group: Booster (12 of 54)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Velocity"

# Attribute that this effect targets
attr = "boosterMaxVelocityPenalty"


def handler(fit, booster, context):
fit.ship.boostItemAttr("maxVelocity", booster.getModifiedItemAttr("boosterMaxVelocityPenalty"))
fit.ship.boostItemAttr("maxVelocity", booster.getModifiedItemAttr(attr))

@@ -4,9 +4,14 @@
# Implants named like: Exile Booster (3 of 4)
# Implants named like: Mindflood Booster (3 of 4)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Missile Explosion Radius"

# Attribute that this effect targets
attr = "boosterMissileAOECloudPenalty"


def handler(fit, booster, context):
fit.modules.filteredChargeBoost(lambda mod: mod.charge.requiresSkill("Missile Launcher Operation"),
"aoeCloudSize", booster.getModifiedItemAttr("boosterMissileAOECloudPenalty"))
"aoeCloudSize", booster.getModifiedItemAttr(attr))

@@ -3,9 +3,14 @@
# Used by:
# Implants named like: Blue Pill Booster (3 of 5)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Missile Explosion Velocity"

# Attribute that this effect targets
attr = "boosterAOEVelocityPenalty"


def handler(fit, booster, context):
fit.modules.filteredChargeBoost(lambda mod: mod.charge.requiresSkill("Missile Launcher Operation"),
"aoeVelocity", booster.getModifiedItemAttr("boosterAOEVelocityPenalty"))
"aoeVelocity", booster.getModifiedItemAttr(attr))

@@ -4,9 +4,14 @@
# Implants named like: Crash Booster (3 of 4)
# Implants named like: X Instinct Booster (3 of 4)
type = "boosterSideEffect"
activeByDefault = False

# User-friendly name for the side effect
displayName = "Missile Velocity"

# Attribute that this effect targets
attr = "boosterMissileVelocityPenalty"


def handler(fit, booster, context):
fit.modules.filteredChargeBoost(lambda mod: mod.charge.requiresSkill("Missile Launcher Operation"),
"maxVelocity", "boosterMissileVelocityPenalty")
"maxVelocity", booster.getModifiedItemAttr(attr))
Some files were not shown because too many files have changed in this diff.