Compare commits
173 Commits
v2024.12.0...main
| Author | SHA1 | Date |
|---|---|---|
|  | 4a1f1c844d |  |
|  | 536866364c |  |
|  | 7c33dd231d |  |
|  | 96d1cf6d78 |  |
|  | b69b0d2f15 |  |
|  | 18506dbaf3 |  |
|  | ca43b3dad5 |  |
|  | d06bfd9d10 |  |
|  | e354ea4d55 |  |
|  | 8f3b0eb186 |  |
|  | 2823dff56a |  |
|  | c42a505e49 |  |
|  | 0576f93477 |  |
|  | 65c406a03d |  |
|  | 9e2a1dc7a1 |  |
|  | 171738fa4d |  |
|  | 09f9aa313c |  |
|  | 451e1a63aa |  |
|  | 733f6efbff |  |
|  | 0d753aa3cd |  |
|  | 73eff21dbb |  |
|  | 37661d91eb |  |
|  | 885797db01 |  |
|  | 29dbdce95e |  |
|  | f5740c735c |  |
|  | bc8d621152 |  |
|  | 5cf2979f38 |  |
|  | 08ce7c65a0 |  |
|  | 61a3545dd7 |  |
|  | 05b46203ab |  |
|  | ddd1df7957 |  |
|  | ab77840bea |  |
|  | 27420946cd |  |
|  | 5dde1a1def |  |
|  | b36d73306d |  |
|  | f7365c0340 |  |
|  | 84bada9967 |  |
|  | bb3fb692df |  |
|  | 694e5a95d1 |  |
|  | 56cbf51129 |  |
|  | a5efd6e992 |  |
|  | 23d85de155 |  |
|  | 433813115f |  |
|  | 559b95819a |  |
|  | e88ed99d6b |  |
|  | 5544df9bd7 |  |
|  | 7dab4d4890 |  |
|  | 406067497e |  |
|  | 0dd3077eb5 |  |
|  | 16e1994ed1 |  |
|  | c4cfa40577 |  |
|  | 3a4ce2311e |  |
|  | 2e35ea9c13 |  |
|  | b594bd2007 |  |
|  | 58fcff8618 |  |
|  | 3daca5336e |  |
|  | 069cff2402 |  |
|  | 6bf3df0418 |  |
|  | ca85e454f9 |  |
|  | f5e5f4eb77 |  |
|  | 0de0eb5c08 |  |
|  | c667c70a64 |  |
|  | 29c3f37a65 |  |
|  | 74e24e6249 |  |
|  | 550ab3f3f6 |  |
|  | d922bbe06f |  |
|  | 9855c64d81 |  |
|  | 3353a09db1 |  |
|  | 8ef31c4bab |  |
|  | 00c6157d2e |  |
|  | 6c72b35b29 |  |
|  | 30d8e84833 |  |
|  | e11be69f00 |  |
|  | 4d23503cee |  |
|  | 912bd77589 |  |
|  | 18d2484a60 |  |
|  | 7d55c87bab |  |
|  | 3feb5f3a77 |  |
|  | e49945ddbc |  |
|  | 18ff158b1d |  |
|  | 5e3ec38ea7 |  |
|  | bfd1cc34d2 |  |
|  | 0e644e913f |  |
|  | 8f42aaaa9a |  |
|  | 49a65ed685 |  |
|  | e5faaaacad |  |
|  | edce99c431 |  |
|  | 846411f550 |  |
|  | f087d0c19b |  |
|  | 25ee4bd106 |  |
|  | 1a7de2f5a0 |  |
|  | 68ac600361 |  |
|  | c65848658e |  |
|  | 3bc4cbe8bd |  |
|  | acafe6fe49 |  |
|  | d6b6b6d805 |  |
|  | 8ad157d17d |  |
|  | 0ef0a512d8 |  |
|  | eddbb005ad |  |
|  | 1998b428f2 |  |
|  | 0b1cc99558 |  |
|  | 39506b3afc |  |
|  | 415f74fe3e |  |
|  | 1a62e483ae |  |
|  | 68892d2d47 |  |
|  | 67e14de804 |  |
|  | 29fcb74896 |  |
|  | fd2397bf66 |  |
|  | 9620747e12 |  |
|  | 35dc6e75ce |  |
|  | d0a189b68a |  |
|  | a19cf0271c |  |
|  | fa51e1d97a |  |
|  | 589a5d1fd1 |  |
|  | 23c708d5c1 |  |
|  | d8f33da253 |  |
|  | df16f76c6b |  |
|  | 8869b7226f |  |
|  | 79b3eaea6f |  |
|  | 7d90b7d833 |  |
|  | ba86c76ad6 |  |
|  | 3f8424415e |  |
|  | f6dba9a098 |  |
|  | bb796f6933 |  |
|  | 168c5c823f |  |
|  | 10b812d7f0 |  |
|  | 0e125a3c19 |  |
|  | cb5f6010b3 |  |
|  | b5783838ff |  |
|  | 2f47b77e23 |  |
|  | 8e9a26cffb |  |
|  | 3d9f28d6a0 |  |
|  | 72c8773263 |  |
|  | 487115c15e |  |
|  | ea5f40094a |  |
|  | e1f60445a2 |  |
|  | 73e93a3dc6 |  |
|  | 8e02ad1477 |  |
|  | 9dfc063e23 |  |
|  | d29355d021 |  |
|  | 92103d0f65 |  |
|  | c00f22c2d7 |  |
|  | 12ea528863 |  |
|  | 96d4f3dfef |  |
|  | 1303ee7c51 |  |
|  | b8f1193143 |  |
|  | 0afa789e6c |  |
|  | b931297a06 |  |
|  | 28bbd885af |  |
|  | 0422624c5f |  |
|  | 8522a1ef70 |  |
|  | 61f9e8bc04 |  |
|  | b540916a53 |  |
|  | 8342923def |  |
|  | 5c97c721ad |  |
|  | d8e2d8eb77 |  |
|  | c46b184627 |  |
|  | 7df8b3a519 |  |
|  | d65751df48 |  |
|  | d0771ad490 |  |
|  | 3334aa662f |  |
|  | 69b7914d9e |  |
|  | d74c3b9923 |  |
|  | c6015c9ada |  |
|  | 0f5e215d78 |  |
|  | 35c061cd10 |  |
|  | eab93ebdf8 |  |
|  | 5e7fd37a86 |  |
|  | 9ad0b8438b |  |
|  | e0d3d4e078 |  |
|  | 6c058c8114 |  |
|  | 03d12d33d9 |  |
|  | 90dc1b1a43 |  |
@@ -17,8 +17,11 @@ logs/gateio.log
logs/kucoin.log
upload_testnet.sh
upload_mainnet.sh
upload_local_testnet.sh
utils/data/binance.db
utils/data/okx.db
utils/data/gateio.db
utils/data/kucoin.db
utils/data/test.py
utils/data/bybit.db
utils/data/test.py
profits/db_read.py
@@ -0,0 +1,622 @@

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[Verbatim text of the GNU GPL v3: Preamble and Terms and Conditions, sections 0-17.]

END OF TERMS AND CONDITIONS
changelog.txt
@@ -1,3 +1,256 @@

2025.12.01:
. Modified the log output of new_market_order.
. Modified Kucoin's case in min_amount_of_base.

2025.11.11:
. deals_cache and the log_list cache are now 20 items long.
. Less log spam.

2025.11.08:
. broker.set_default_order_size() now saves the config file to disk after changing the value.
. Variable renaming and other small stuff.

2025.10.24:
. Toggling liquidate_after_switch now writes the config file to disk so the setting persists between trades.
. Manually switching to long now sets double_check_price to false.
. Added a few comments to switch_to_long.

2025.10.12:
. do_cleanup relocated after generating the safety orders' prices.

2025.10.11:
. Minor simplification in do_cleanup.
. Removed a couple of (no longer needed?) pauses.

2025.10.10:
. New endpoint: /refresh_log_cache.
. Fixed an error in the /add_so endpoint that incremented the config setting but not the status setting.

2025.10.09:
. Cleanup is done as soon as the trader starts, rather than after sending the take profit and safety orders.

2025.10.07:
. In short traders, if there are too few safety orders (less than 67% of the max amount), safety_order_deviance is increased from 2% to 3%.

2025.10.04:
. Fixed an error while logging orders in new_simulated_market_order.
. renew_tp_and_so_routine now sends the take profit order first, and then the safety orders.

2025.10.03:
. New broker config option: log_orders. If set to True, orders are logged to orders.log under the logs directory.
. New API endpoint: /toggle_log_orders.

2025.10.01:
. Fixed base fees not being taken into account.

2025.09.27:
. Added notes to every entry of deal_order_history.
. Minor refactor in renew_tp_and_so_routine.
. Added another cooldown before sending a take profit order (to give the exchange a bit more time to correctly reflect the amount of base present in the account).
. Updated the cleanup routine to leave some change in the account.

2025.09.25:
. Added a pause after getting filled orders in check_status.
. Added an extra logging line in take_profit_routine.

2025.09.24:
. Added a new config option: wait_after_initial_market_order. It specifies, in seconds, the wait time after sending the initial market order.
It should help the exchanges report the recently filled market order correctly.
. Removed the unused "PAUSED" notice from the screen output.

2025.09.21:
. Fixed a bug that caused short traders to have an incorrect order size.

2025.09.20:
. Fixed a bug that caused short traders to initialize using the same workflow as long ones.

2025.09.19:
. Added the pageSize parameter to the open order requests when querying Kucoin.

2025.09.18:
. do_cleanup now uses get_min_quote_size.
. Added an extra price check to switch_to_long.
. Removed the old check_old_long_price method.

2025.09.14:
. Refactored full order list fetching.
. Minor refactor of restart_pair_no_json.
. Pausing the trader is now done via the set_pause() method.
. Reverted the modification of the wait time after the initial market order.
. wait_time is now present in the broker config file.
. Minor refactorings.

2025.09.13:
. Increased the wait time after the initial market order.

2025.09.12:
. No retries when sending a cleanup order.
. Removed redundant try...except blocks in switch_to_long.

2025.09.11:
. Fixed a bug in start_trader that called amount_to_precision with very low amounts and spammed the logs.

2025.09.10:
. Deal order history now stores only the id of each order instead of the full order object.

2025.09.08:
. Re-enabled the long-to-short autoswitch.

2025.09.07:
. Increased the wait time after sending market orders.

2025.09.05:
. The trader now supports multiple safety orders at the same time.
. Removed order forcing when importing a trader. It may be reinstated at a later date.
. Removed the /reload_safety_orders endpoint.
. New endpoints: /mod_concurrent_safety_orders, /mod_boosted_concurrent_safety_orders and /force_trader_close.
. Modified the cleanup routine.
. Default wait_time back to 0.5 seconds.
. General optimizations.

2025.08.19:
. Improved log trimming.

2025.08.18:
. Database handling optimization.

2025.08.17:
. Minor refactorings.

2025.08.16:
. Improved threading.

2025.08.15:
. "deal order history" is now disabled by default.
. CPU optimizations in status string generation.

2025.08.14:
. Refactored gib_so_size.
. Refactored seconds_to_time.
. Refactored linear_space.
. Refactored dca_cost_calculator.
. Refactored return_optimal_order_size.
. Minor refactor in generate_status_strings.
. Optimized imports.
. Deal_order_history now only stores the important parts of the orders to save some RAM.
. Removed the deprecated "profit_to_file" method.

2025.08.12:
. Default "check_slippage" value is now True.
. Removed capitalization from the exchange name when sending the trader quit notification.
. Added exception handling when sending Telegram messages.

2025.08.10:
. Added the exchange name to the trader quit notification.
. New endpoint: mod_default_order_size. It modifies the default order size of a broker.
. Added a "generated_at" field to any newly generated trader config file.

2025.07.21:
. Corrected an error in switch_to_long.

2025.07.18:
. Changed Waitress' logging level to ERROR.

2025.07.11:
. Default "check_slippage" value is now False.
. The default order size is now enforced per exchange.
. Reinstated config reloading when a deal is closed.
. New endpoint: mod_order_size.

2025.06.04:
. Added the "base_add_calculation" endpoint: it calculates how many safety orders can be added with the amount of base currency available on the exchange.

2025.05.31:
. Added "liquidate after switch": once the short trader is ready to switch to long, it liquidates the base currency and shuts down the trader.

2025.05.27:
. Commented out parameter validation in the config and status handlers.

2025.05.26:
. Added closed order request wrappers in exchange_wrapper.
. In check_status: autoswitch now runs prior to the order check.

2025.05.18:
. In start_trader, missing base is calculated using amount_to_precision.

2025.05.16:
. Added exception handling when removing a trader.
. Minor variable renaming.

2025.05.12:
. Improved config file handling when switching to long or short.
. If the config file does not exist, it is created.

2025.05.05:
. Forced a Telegram message when the quit flag is raised.

2025.05.04:
. Simplified unwrapped_add_pair.

2025.04.10:
. Modified Gate.io's min_base_size.

2025.04.09:
. Changed the default number of retries to 5.

2025.04.08:
. Updated Binance, KuCoin and Gate.io min_base_size.

2025.04.04:
. Added the validate_market method to the broker object.

2025.03.29:
. Enabling last_call now disables autoswitch.

2025.03.19:
. Added the possibility to force specific orders when importing a trader.

2025.03.07:
. Fixed an error in take_profit_routine.

2025.03.04:
. Fixed an error in add_quote.
. Fixed an error in last_call.
. Fixed a bug in switch_quote_currency that prevented the new config and status files from being written to disk.

2025.03.03:
. Replaced more variables with their respective config handlers.
. Added a new API endpoint: reload_trader_config.
. Removed config reloading when a trader closes a deal.

2025.03.02:
. Fixed an error in restart_pair_no_json().

2025.03.01:
. Initial StatusHandler implementation.
. Variable cleanup.

2025.02.27:
. ConfigHandler: centralized configuration handling in an object, for easier future development (parameter validation, for example).
. Bugfixes everywhere.
. Exchange_wrapper now validates every symbol against the market information from the exchange, both when adding and importing a trader.

2025.02.02:
. new_so_routine now cancels the old take profit order after the new safety order is sent.

2025.01.31:
. In check_status, when the safety order is None it no longer returns 1.

2025.01.10:
. Added is_paused to status_dict.

2024.12.14:
. Modified Waitress parameters.

2024.12.07:
. Switched to a proper WSGI server (Waitress).

2024.12.04:
. Proper handling of a missing .bak file in switch_to_long.

2024.12.02:
. New endpoint: /get_balance.

2024.12.01:
. Added a "generated_at" entry: when generating a config file, the generation timestamp is saved in the config file.
. If the switch price is lower than the next SO price, the switch price is displayed in green instead of the next SO price.
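Several of the changelog entries above add or remove HTTP API endpoints (for example /get_balance, /refresh_log_cache, /toggle_log_orders, mod_default_order_size) served by the bot's Waitress-based API. As a rough sketch of how such an endpoint might be queried — the base URL, port, HTTP method, and response format are assumptions, since none of them are documented in this diff:

```python
# Hedged sketch: queries two of the endpoints named in the changelog.
# The host, port, HTTP verb, and response shape are assumptions; adjust
# them to however the Waitress application is actually exposed.
from urllib.request import urlopen

BASE_URL = "http://127.0.0.1:8080"  # assumed host and port


def call_endpoint(path: str) -> str:
    """GET an API endpoint and return the raw response body as text."""
    with urlopen(f"{BASE_URL}{path}") as response:
        return response.read().decode()


if __name__ == "__main__":
    print(call_endpoint("/get_balance"))        # balance query (added 2024.12.02)
    print(call_endpoint("/refresh_log_cache"))  # log cache refresh (added 2025.10.10)
```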
@@ -0,0 +1,375 @@

```python
from time import time
from json import dumps, load


class ConfigHandler:
    '''
    Handles the configuration of the trader and the validation of the parameters
    '''

    def __init__(self, pair, broker, config_dict=None):
        self.broker = broker
        self.default_config_dictionary = {
            "pair": pair,
            "is_short": False,
            "order_size": self.broker.get_default_order_size(),
            "no_of_safety_orders": 30,
            "max_short_safety_orders": 45,
            "concurrent_safety_orders": 3,
            "boosted_concurrent_safety_orders": 5,
            "safety_order_deviance": 2,
            "safety_order_scale": 0.0105,
            "dynamic_so_deviance": True,
            "bias": -0.5,
            "dsd_range": 1,
            "cleanup": True,
            "autoswitch": False,
            "liquidate_after_switch": False,
            "attempt_restart": True,
            "tp_mode": 3,
            "tp_level": 1.025,
            "tp_table": [],
            "check_slippage": True,
            "programmed_stop": False,
            "programmed_stop_time": 0,
            "boosted_deals_range": 4,
            "boosted_time_range": 3600,
            "boosted_amount": 0.01,
            "force_restart_if_retries_exhausted": False,
            "check_old_long_price": False  # switch_to_short should flip this to True unless stated
        }
        # if self.broker.get_exchange_name() == "kucoin":
        #     self.default_config_dictionary["concurrent_safety_orders"] = 1
        #     self.default_config_dictionary["boosted_concurrent_safety_orders"] = 1
        self.config_file_path = f"configs/{pair.split('/')[0]}{pair.split('/')[1]}.json"
        self.config_dictionary = self.default_config_dictionary.copy()

        # Load the config file from disk (if it exists)
        if self.load_from_file() == 1:
            # If the config file does not exist, write a new one with the default values
            # and stamp it with a generation timestamp.
            self.config_dictionary["generated_at"] = int(time())
            self.save_to_file()
        if config_dict is not None:
            self.config_dictionary.update(config_dict)
            self.save_to_file()

    def reset_to_default(self):
        self.config_dictionary = self.default_config_dictionary.copy()
        return 0

    def get_pair(self):
        return self.config_dictionary["pair"]

    def get_is_short(self):
        return self.config_dictionary["is_short"]

    def get_order_size(self):
        return self.config_dictionary["order_size"]

    def get_no_of_safety_orders(self):
        return self.config_dictionary["no_of_safety_orders"]

    def get_max_short_safety_orders(self):
        return self.config_dictionary["max_short_safety_orders"]

    def get_concurrent_safety_orders(self):
        return self.config_dictionary["concurrent_safety_orders"]

    def get_boosted_concurrent_safety_orders(self):
        return self.config_dictionary["boosted_concurrent_safety_orders"]

    def get_safety_order_deviance(self):
        return self.config_dictionary["safety_order_deviance"]

    def get_safety_order_scale(self):
        return self.config_dictionary["safety_order_scale"]

    def get_dynamic_so_deviance(self):
        return self.config_dictionary["dynamic_so_deviance"]

    def get_bias(self):
        return self.config_dictionary["bias"]

    def get_dsd_range(self):
        return self.config_dictionary["dsd_range"]

    def get_cleanup(self):
        return self.config_dictionary["cleanup"]

    def get_autoswitch(self):
        return self.config_dictionary["autoswitch"]

    def get_liquidate_after_switch(self):
        return self.config_dictionary["liquidate_after_switch"]

    def get_attempt_restart(self):
        return self.config_dictionary["attempt_restart"]

    def get_tp_mode(self):
        return self.config_dictionary["tp_mode"]

    def get_tp_level(self):
        return self.config_dictionary["tp_level"]

    def get_tp_table(self):
        return self.config_dictionary["tp_table"]

    def get_check_slippage(self):
        return self.config_dictionary["check_slippage"]

    def get_programmed_stop(self):
        return self.config_dictionary["programmed_stop"]

    def get_programmed_stop_time(self):
        return self.config_dictionary["programmed_stop_time"]

    def get_boosted_deals_range(self):
        return self.config_dictionary["boosted_deals_range"]

    def get_boosted_time_range(self):
        return self.config_dictionary["boosted_time_range"]

    def get_boosted_amount(self):
        return self.config_dictionary["boosted_amount"]

    def get_force_restart_if_retries_exhausted(self):
        return self.config_dictionary["force_restart_if_retries_exhausted"]

    def get_check_old_long_price(self):
        return self.config_dictionary["check_old_long_price"]

    def get_config_file_path(self):
        return self.config_file_path

    def set_config_file_path(self, new_file_path: str):
        # if not isinstance(new_file_path, str):
        #     self.broker.logger.log_this(f"File path provided is not a string",1,self.get_pair())
        #     return 1
        self.config_file_path = new_file_path
        return 0

    def set_pair(self, pair: str):
        # if not isinstance(pair, str):
        #     self.broker.logger.log_this(f"Pair provided is not a string",1,self.get_pair())
        #     return 1
        self.config_dictionary["pair"] = pair
        return 0

    def set_is_short(self, is_short: bool):
        # if not isinstance(is_short, bool):
        #     self.broker.logger.log_this(f"Is short provided is not a boolean",1,self.get_pair())
        #     return 1
        self.config_dictionary["is_short"] = is_short
        return 0

    def set_order_size(self, order_size):
        # if not isinstance(order_size, (int, float)):
        #     self.broker.logger.log_this(f"Order size provided is not a number",1,self.get_pair())
        #     return 1
        self.config_dictionary["order_size"] = order_size
        return 0

    def set_no_of_safety_orders(self, no_of_safety_orders: int):
        # if not isinstance(no_of_safety_orders, int):
        #     self.broker.logger.log_this(f"No of safety orders provided is not an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["no_of_safety_orders"] = no_of_safety_orders
        return 0

    def set_max_short_safety_orders(self, max_short_safety_orders: int):
        # if not isinstance(max_short_safety_orders, int):
        #     self.broker.logger.log_this(f"Max short safety orders provided is not an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["max_short_safety_orders"] = max_short_safety_orders
        return 0

    def set_concurrent_safety_orders(self, concurrent_safety_orders: int):
        # if not isinstance(concurrent_safety_orders, int):
        #     self.broker.logger.log_this(f"Max concurrent safety orders provided is not an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["concurrent_safety_orders"] = concurrent_safety_orders
        return 0

    def set_boosted_concurrent_safety_orders(self, boosted_concurrent_safety_orders: int):
        # if not isinstance(boosted_concurrent_safety_orders, int):
        #     self.broker.logger.log_this(f"Max boosted concurrent safety orders provided is not an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["boosted_concurrent_safety_orders"] = boosted_concurrent_safety_orders
        return 0

    def set_safety_order_deviance(self, safety_order_deviance: int):
        # if not isinstance(safety_order_deviance, int):
        #     self.broker.logger.log_this(f"Safety order deviance provided is not an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["safety_order_deviance"] = safety_order_deviance
        return 0

    def set_safety_order_scale(self, safety_order_scale: float):
        # if not isinstance(safety_order_scale, float):
        #     self.broker.logger.log_this(f"Safety order scale provided is not a float",1,self.get_pair())
        #     return 1
        self.config_dictionary["safety_order_scale"] = safety_order_scale
        return 0

    def set_dynamic_so_deviance(self, dynamic_so_deviance: bool):
        # if not isinstance(dynamic_so_deviance, bool):
        #     self.broker.logger.log_this(f"Dynamic safety order deviance provided is not a boolean",1,self.get_pair())
        #     return 1
        self.config_dictionary["dynamic_so_deviance"] = dynamic_so_deviance
        return 0

    def set_bias(self, bias: float):
        # if not isinstance(bias, float):
        #     self.broker.logger.log_this(f"Bias provided is not a float",1,self.get_pair())
        #     return 1
        self.config_dictionary["bias"] = bias
        return 0

    def set_dsd_range(self, dsd_range):
        # if not isinstance(dsd_range, (int, float)):
        #     self.broker.logger.log_this(f"dsd_range must be an int or a float",1,self.get_pair())
        #     return 1
        self.config_dictionary["dsd_range"] = dsd_range
        return 0

    def set_cleanup(self, cleanup: bool):
        # if not isinstance(cleanup, bool):
        #     self.broker.logger.log_this(f"cleanup must be a boolean",1,self.get_pair())
        #     return 1
        self.config_dictionary["cleanup"] = cleanup
        return 0

    def set_autoswitch(self, autoswitch: bool):
        # if not isinstance(autoswitch, bool):
        #     self.broker.logger.log_this(f"autoswitch must be a boolean",1,self.get_pair())
        #     return 1
        self.config_dictionary["autoswitch"] = autoswitch
        return 0

    def set_liquidate_after_switch(self, liquidate_after_switch: bool):
        # if not isinstance(liquidate_after_switch, bool):
        #     self.broker.logger.log_this(f"liquidate_after_switch must be a boolean",1,self.get_pair())
        #     return 1
        self.config_dictionary["liquidate_after_switch"] = liquidate_after_switch
        self.save_to_file()
        return 0

    def set_tp_mode(self, tp_mode: int):
        # if not isinstance(tp_mode, int):
        #     self.broker.logger.log_this(f"tp_mode must be an integer",1,self.get_pair())
        #     return 1
        self.config_dictionary["tp_mode"] = tp_mode
        return 0

    def set_tp_level(self, tp_level: float):
        # if not isinstance(tp_level, float):
        #     self.broker.logger.log_this(f"tp_level must be a float",1,self.get_pair())
        #     return 1
        self.config_dictionary["tp_level"] = tp_level
```
|
||||
return 0
|
||||
|
||||
def set_tp_table(self, tp_table: list):
|
||||
# if not isinstance(tp_table, list):
|
||||
# self.broker.logger.log_this(f"tp_table must be a list",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["tp_table"] = tp_table
|
||||
return 0
|
||||
|
||||
def set_check_slippage(self, check_slippage: bool):
|
||||
# if not isinstance(check_slippage, bool):
|
||||
# self.broker.logger.log_this(f"check_slippage must be a boolean",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["check_slippage"] = check_slippage
|
||||
return 0
|
||||
|
||||
def set_programmed_stop(self, programmed_stop: bool):
|
||||
# if not isinstance(programmed_stop, bool):
|
||||
# self.broker.logger.log_this(f"programmed_stop must be a boolean",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["programmed_stop"] = programmed_stop
|
||||
return 0
|
||||
|
||||
def set_programmed_stop_time(self, programmed_stop_time):
|
||||
# if not isinstance(programmed_stop_time, (int,float)):
|
||||
# self.broker.logger.log_this(f"programmed_stop_time must be an int or a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["programmed_stop_time"] = programmed_stop_time
|
||||
return 0
|
||||
|
||||
def set_boosted_deals_range(self, boosted_deals_range: int):
|
||||
# if not isinstance(boosted_deals_range, int):
|
||||
# self.broker.logger.log_this(f"boosted_deals_range must be an int",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["boosted_deals_range"] = boosted_deals_range
|
||||
return 0
|
||||
|
||||
def set_boosted_time_range(self, boosted_time_range: int):
|
||||
# if not isinstance(boosted_time_range, int):
|
||||
# self.broker.logger.log_this(f"boosted_time_range must be an int",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["boosted_time_range"] = boosted_time_range
|
||||
return 0
|
||||
|
||||
def set_boosted_amount(self, boosted_amount: float):
|
||||
# if not isinstance(boosted_amount, float):
|
||||
# self.broker.logger.log_this(f"boosted_amount must be a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["boosted_amount"] = boosted_amount
|
||||
return 0
|
||||
|
||||
def set_force_restart_if_retries_exhausted(self, force_restart_if_retries_exhausted: bool):
|
||||
# if not isinstance(force_restart_if_retries_exhausted, bool):
|
||||
# self.broker.logger.log_this(f"force_restart_if_retries_exhausted must be a bool",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["force_restart_if_retries_exhausted"] = force_restart_if_retries_exhausted
|
||||
return 0
|
||||
|
||||
def set_check_old_long_price(self, check_old_long_price: bool):
|
||||
# if not isinstance(check_old_long_price, bool):
|
||||
# self.broker.logger.log_this(f"check_old_long_price must be a bool",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary["check_old_long_price"] = check_old_long_price
|
||||
return 0
|
||||
|
||||
|
||||
def save_to_file(self, file_path = None):
|
||||
if file_path is None:
|
||||
file_path = self.config_file_path
|
||||
# if not isinstance(file_path, str):
|
||||
# self.broker.logger.log_this(f"file_path must be a string",1,self.get_pair())
|
||||
# return 1
|
||||
try:
|
||||
with open(file_path, "w") as f:
|
||||
f.write(dumps(self.config_dictionary, indent=4))
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.broker.logger.log_this(f"Error saving config to file: {file_path}: {e}",1,self.get_pair())
|
||||
return 1
|
||||
|
||||
def load_from_file(self, file_path = None):
|
||||
if file_path is None:
|
||||
file_path = self.config_file_path
|
||||
# if not isinstance(file_path, str):
|
||||
# self.broker.logger.log_this(f"file_path must be a string",1,self.get_pair())
|
||||
# return 1
|
||||
try:
|
||||
with open(file_path, "r") as f:
|
||||
self.set_config({**self.default_config_dictionary, **load(f)})
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.broker.logger.log_this(f"Config file does not exist or is not readable: {e}",1,self.get_pair())
|
||||
return 1
|
||||
|
||||
def get_config(self):
|
||||
return self.config_dictionary
|
||||
|
||||
def set_config(self, config_dictionary: dict):
|
||||
'''
|
||||
Replaces the config dictionary with the one provided (per-key validation is currently disabled)
|
||||
'''
|
||||
# if not isinstance(config_dictionary, dict):
|
||||
# self.broker.logger.log_this(f"config_dictionary must be a dictionary",1,self.get_pair())
|
||||
# return 1
|
||||
self.config_dictionary = config_dictionary
|
||||
return 0
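# Illustrative sketch (not part of this change): load_from_file above overlays the
# on-disk values on top of the defaults, so keys present in the file win and anything
# missing keeps its default. Keys below are real config keys, the values are invented.
#   defaults  = {"tp_mode": 0, "tp_level": 1.0, "cleanup": False}
#   from_file = {"tp_level": 2.5}
#   {**defaults, **from_file} == {"tp_mode": 0, "tp_level": 2.5, "cleanup": False}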
|
||||
|
|
@ -1,63 +1,81 @@
|
|||
import json
|
||||
import collections
|
||||
import time
|
||||
import requests
|
||||
import credentials
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
from requests import get as requests_get
|
||||
from json import load, dumps
|
||||
from copy import deepcopy
|
||||
|
||||
|
||||
class broker:
|
||||
def __init__(self,exchange,read_config,config_filename):
|
||||
class Broker:
|
||||
def __init__(self,exchange,broker_config,config_filename):
|
||||
self.config_filename = config_filename
|
||||
self.read_config = read_config
|
||||
self.broker_config = broker_config
|
||||
self.exchange = exchange
|
||||
self.last_price = 0
|
||||
self.wait_time = 1 #Default wait time for API breathing room
|
||||
self.cooldown_multiplier = 2 #Default cooldown multiplier value
|
||||
if "cooldown_multiplier" in self.read_config:
|
||||
self.cooldown_multiplier = self.read_config["cooldown_multiplier"]
|
||||
self.wait_before_new_safety_order = 1
|
||||
if "wait_before_new_safety_order" in self.read_config:
|
||||
self.wait_before_new_safety_order = self.read_config["wait_before_new_safety_order"]
|
||||
self.empty_order = {"id": "", "status": "", "filled": 0, "remaining": 0, "price": 0, "cost": 0, "fees": [], "symbol": ""}
|
||||
self.retries = read_config["retries"] if "retries" in self.read_config else 10
|
||||
self.slippage_default_threshold = self.read_config["slippage_default_threshold"] if "slippage_default_threshold" in read_config else .03
|
||||
self.logger = logger(self.read_config)
|
||||
self.write_order_history = True #This should be a toggle in config_file
|
||||
|
||||
self.profits_database_filename = "profits/profits_database.db"
|
||||
self.database_connection = sqlite3.connect(self.profits_database_filename)
|
||||
self.database_cursor = self.database_connection.cursor()
|
||||
self.database_cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS profits_table (
|
||||
timestamp REAL PRIMARY KEY,
|
||||
pair TEXT,
|
||||
amount REAL,
|
||||
exchange_name TEXT,
|
||||
order_id TEXT,
|
||||
order_history TEXT
|
||||
)
|
||||
''')
|
||||
self.database_connection.commit()
|
||||
self.database_connection.close()
|
||||
#Default values
|
||||
self.wait_time = self.broker_config.get("wait_time",.5)
|
||||
self.cooldown_multiplier = self.broker_config.get("cooldown_multiplier",2)
|
||||
self.wait_after_initial_market_order = self.broker_config.get("wait_after_initial_market_order",1)
|
||||
self.wait_before_new_safety_order = self.broker_config.get("wait_before_new_safety_order",1)
|
||||
self.retries = self.broker_config.get("retries",5)
|
||||
self.slippage_default_threshold = self.broker_config.get("slippage_default_threshold",.03)
|
||||
self.follow_order_history = self.broker_config.get("follow_order_history",False)
|
||||
self.write_order_history = self.broker_config.get("write_order_history", False)
|
||||
self.logger = Logger(self.broker_config)
|
||||
self.log_orders = self.broker_config.get("log_orders",False)
|
||||
|
||||
self.exchange.load_markets()
|
||||
#Initialize database
|
||||
self.profits_database_filename = "profits/profits_database.db"
|
||||
|
||||
self._db = sqlite3.connect(self.profits_database_filename,
|
||||
detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
|
||||
check_same_thread=False)
|
||||
self._db.row_factory = sqlite3.Row
|
||||
with self._db:
|
||||
self._db.execute('''
|
||||
CREATE TABLE IF NOT EXISTS profits_table (
|
||||
timestamp REAL PRIMARY KEY,
|
||||
pair TEXT,
|
||||
amount REAL,
|
||||
exchange_name TEXT,
|
||||
order_id TEXT,
|
||||
order_history TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
#Load markets
|
||||
self.markets = self.exchange.load_markets()
|
||||
|
||||
#Populates deals cache
|
||||
self.deals_cache_length = 10
|
||||
self.deals_cache_length = 20
|
||||
self.deals_list = self.preload_deals(amount_to_preload=self.deals_cache_length)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _cur(self):
|
||||
'''
|
||||
Database cursor
|
||||
'''
|
||||
cur = self._db.cursor()
|
||||
try:
|
||||
yield cur
|
||||
finally:
|
||||
cur.close()
|
||||
|
||||
|
||||
def preload_deals(self,amount_to_preload=10):
|
||||
'''
|
||||
Reads the last n deals from the database and returns them in a list
|
||||
'''
|
||||
connection = sqlite3.connect(self.profits_database_filename)
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(f"SELECT * FROM profits_table WHERE exchange_name = ? ORDER BY timestamp DESC LIMIT ?", (self.get_exchange_name(), amount_to_preload))
|
||||
result = cursor.fetchall()
|
||||
connection.close()
|
||||
|
||||
query = "SELECT * FROM profits_table WHERE exchange_name = ? ORDER BY timestamp DESC LIMIT ?"
|
||||
with self._cur() as cur:
|
||||
cur.execute(query, (self.get_exchange_name(), amount_to_preload))
|
||||
result = cur.fetchall()
|
||||
|
||||
return [(row[0],row[1],row[2],row[3],row[4],"") for row in result]
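# Shape of each cached deal returned above (values invented for illustration):
#   (timestamp, pair, amount, exchange_name, order_id, "")
#   e.g. (1735689600.0, "BTC/USDT", 1.23, "binance", "123456789", "")
# The order_history column from the database row is replaced with an empty string here.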
|
||||
|
||||
|
||||
|
|
@ -65,11 +83,29 @@ class broker:
|
|||
return self.deals_list
|
||||
|
||||
|
||||
def get_log_orders(self):
|
||||
return self.log_orders
|
||||
|
||||
|
||||
def set_log_orders(self,log_orders:bool):
|
||||
self.log_orders = log_orders
|
||||
return 0
|
||||
|
||||
|
||||
def get_symbol(self,pair):
|
||||
if "/" in pair:
|
||||
return pair
|
||||
for item in self.markets:
|
||||
if f"{self.markets[item]['base']}{self.markets[item]['quote']}"==pair:
|
||||
return self.markets[item]["symbol"]
|
||||
return "Error"
|
||||
|
||||
def all_markets(self,no_retries=False):
|
||||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
return self.exchange.load_markets()
|
||||
self.markets = self.exchange.load_markets()
|
||||
return self.markets
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in reload_markets: {e}")
|
||||
if no_retries:
|
||||
|
|
@ -79,9 +115,26 @@ class broker:
|
|||
return {}
|
||||
|
||||
|
||||
def validate_market(self,symbol):
|
||||
'''
|
||||
Checks that the market for the symbol exists, that it's a spot market and that it's active.
|
||||
Returns True if the market is valid, False otherwise.
|
||||
'''
|
||||
if symbol not in self.markets:
|
||||
self.logger.log_this(f"Market {symbol} not found in the exchange")
|
||||
return False
|
||||
if self.markets[symbol]['spot'] == False:
|
||||
self.logger.log_this(f"Market {symbol} is not a spot market")
|
||||
return False
|
||||
if self.markets[symbol]['active'] == False:
|
||||
self.logger.log_this(f"Market {symbol} is not active")
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def reload_markets(self):
|
||||
try:
|
||||
self.exchange.load_markets(reload=True)
|
||||
self.markets = self.exchange.load_markets(reload=True)
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in reload_markets: {e}")
|
||||
|
|
@ -93,21 +146,13 @@ class broker:
|
|||
Returns the timestamps of the last trades from the database for the boosting algorithm
|
||||
'''
|
||||
|
||||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
database_connection = sqlite3.connect(self.profits_database_filename)
|
||||
database_cursor = database_connection.cursor()
|
||||
database_cursor.execute(f"SELECT * FROM profits_table WHERE timestamp >= {time.time()-timespan} ORDER BY timestamp")
|
||||
rows = database_cursor.fetchall()
|
||||
return [item[0] for item in rows if item[1]==pair]
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in preload_timestamps: {e}")
|
||||
if no_retries:
|
||||
break
|
||||
retries-=1
|
||||
time.sleep(self.wait_time)
|
||||
return []
|
||||
limit = time.time()-timespan
|
||||
query = "SELECT * FROM profits_table WHERE timestamp >= ? ORDER BY timestamp"
|
||||
|
||||
with self._cur() as cur:
|
||||
cur.execute(query,(limit,))
|
||||
rows = cur.fetchall()
|
||||
return [item[0] for item in rows if item[1]==pair]
|
||||
|
||||
|
||||
def write_profit_to_cache(self,dataset):
|
||||
|
|
@ -124,22 +169,11 @@ class broker:
|
|||
'''
|
||||
dataset format: (timestamp,pair,amount,exchange_name,order_id,order_history)
|
||||
'''
|
||||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
database_connection = sqlite3.connect(self.profits_database_filename)
|
||||
database_cursor = database_connection.cursor()
|
||||
database_cursor.execute('INSERT INTO profits_table VALUES(?, ?, ?, ?, ?, ?)', dataset)
|
||||
database_connection.commit()
|
||||
database_connection.close()
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in write_profit_to_db: {e}")
|
||||
if no_retries:
|
||||
break
|
||||
retries-=1
|
||||
time.sleep(self.wait_time)
|
||||
return 0
|
||||
return 1
|
||||
|
||||
query = "INSERT INTO profits_table VALUES(?, ?, ?, ?, ?, ?)"
|
||||
with self._db:
|
||||
self._db.execute(query, dataset)
|
||||
return 0
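# Illustrative dataset (values invented) matching the profits_table columns created
# in __init__: (timestamp, pair, amount, exchange_name, order_id, order_history),
#   e.g. (1735689600.0, "BTC/USDT", 1.23, "binance", "123456789", "[]")
# Passing such a tuple to write_profit_to_db inserts one row; timestamp is the primary key.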
|
||||
|
||||
|
||||
def check_for_duplicate_profit_in_db(self,order,no_retries=False):
|
||||
|
|
@ -148,29 +182,21 @@ class broker:
|
|||
Compares the id of the last profit order with the one in the database.
|
||||
'''
|
||||
|
||||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
database_connection = sqlite3.connect(self.profits_database_filename)
|
||||
database_cursor = database_connection.cursor()
|
||||
database_cursor.execute(f"SELECT * FROM profits_table WHERE pair = '{order['symbol']}' ORDER BY timestamp DESC LIMIT 1;")
|
||||
rows = database_cursor.fetchall()
|
||||
database_connection.close()
|
||||
if rows==[]:
|
||||
return False
|
||||
return order["id"]==rows[0][4]
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in check_for_duplicate_profit_in_db: {e}",1)
|
||||
if no_retries:
|
||||
break
|
||||
retries-=1
|
||||
time.sleep(self.wait_time)
|
||||
return False
|
||||
query = f"SELECT * FROM profits_table WHERE pair = ? ORDER BY timestamp DESC LIMIT 1;"
|
||||
with self._cur() as cur:
|
||||
cur.execute(query, (order["symbol"],))
|
||||
result = cur.fetchone()
|
||||
if result is None:
|
||||
return False
|
||||
return order["id"]==result[4]
|
||||
|
||||
|
||||
def get_write_order_history(self):
|
||||
return self.write_order_history
|
||||
|
||||
def get_follow_order_history(self):
|
||||
return self.follow_order_history
|
||||
|
||||
def get_cooldown_multiplier(self):
|
||||
return self.cooldown_multiplier
|
||||
|
||||
|
|
@ -178,6 +204,13 @@ class broker:
|
|||
self.cooldown_multiplier = value
|
||||
return 0
|
||||
|
||||
def get_wait_after_initial_market_order(self):
|
||||
return self.wait_after_initial_market_order
|
||||
|
||||
def set_wait_after_initial_market_order(self, value:float):
|
||||
self.wait_after_initial_market_order = value
|
||||
return 0
|
||||
|
||||
def get_wait_before_new_safety_order(self):
|
||||
return self.wait_before_new_safety_order
|
||||
|
||||
|
|
@ -186,11 +219,12 @@ class broker:
|
|||
return 0
|
||||
|
||||
def get_default_order_size(self):
|
||||
return self.read_config["default_order_size"]
|
||||
return self.broker_config["default_order_size"]
|
||||
|
||||
def set_default_order_size(self,size):
|
||||
try:
|
||||
self.read_config["default_order_size"] = float(size)
|
||||
self.broker_config["default_order_size"] = float(size)
|
||||
self.rewrite_config_file()
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in set_default_order_size: {e}",1)
|
||||
return 1
|
||||
|
|
@ -228,7 +262,7 @@ class broker:
|
|||
|
||||
|
||||
def get_exchange_name(self):
|
||||
return self.read_config["exchange"]
|
||||
return self.broker_config["exchange"]
|
||||
|
||||
|
||||
def set_wait_time(self,sec):
|
||||
|
|
@ -252,33 +286,33 @@ class broker:
|
|||
|
||||
|
||||
def get_config(self):
|
||||
return deepcopy(self.read_config)
|
||||
return deepcopy(self.broker_config)
|
||||
|
||||
|
||||
def set_config(self,new_config):
|
||||
self.read_config = deepcopy(new_config)
|
||||
self.broker_config = deepcopy(new_config)
|
||||
return 0
|
||||
|
||||
|
||||
def reload_config_file(self):
|
||||
try:
|
||||
with open(self.config_filename) as f:
|
||||
self.read_config = json.load(f)
|
||||
self.broker_config = load(f)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception while reading the config file: {e}",1)
|
||||
|
||||
|
||||
def add_pair_to_config(self,pair):
|
||||
if pair not in self.read_config["pairs"]:
|
||||
self.read_config["pairs"].append(pair)
|
||||
if pair not in self.broker_config["pairs"]:
|
||||
self.broker_config["pairs"].append(pair)
|
||||
return 0
|
||||
return 1
|
||||
|
||||
|
||||
def remove_pair_from_config(self,pair):
|
||||
try:
|
||||
if pair in self.read_config["pairs"]:
|
||||
self.read_config["pairs"].remove(pair)
|
||||
if pair in self.broker_config["pairs"]:
|
||||
self.broker_config["pairs"].remove(pair)
|
||||
return 0
|
||||
self.logger.log_this("Pair does not exist - Can't remove from read_config",1,pair)
|
||||
return 2
|
||||
|
|
@ -288,31 +322,34 @@ class broker:
|
|||
|
||||
|
||||
def get_pairs(self):
|
||||
return self.read_config["pairs"]
|
||||
return self.broker_config["pairs"]
|
||||
|
||||
|
||||
def clear_pairs(self):
|
||||
self.read_config["pairs"].clear()
|
||||
self.broker_config["pairs"].clear()
|
||||
return 0
|
||||
|
||||
|
||||
def get_lap_time(self):
|
||||
return self.read_config["lap_time"]
|
||||
return self.broker_config["lap_time"]
|
||||
|
||||
|
||||
def set_lap_time(self,new_lap_time):
|
||||
try:
|
||||
self.read_config["lap_time"]=float(new_lap_time)
|
||||
self.broker_config["lap_time"]=float(new_lap_time)
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Can't set new lap time. {new_lap_time} is an invalid entry. Exception: {e}",1)
|
||||
return 1
|
||||
|
||||
|
||||
def rewrite_config_file(self):
|
||||
def rewrite_config_file(self, backup=False):
|
||||
try:
|
||||
if backup:
|
||||
with open(f"{self.exchange}.bak","w") as c:
|
||||
c.write(dumps(self.broker_config, indent=4))
|
||||
with open(f"{self.config_filename}","w") as f:
|
||||
f.write(json.dumps(self.read_config, indent=4))
|
||||
f.write(dumps(self.broker_config, indent=4))
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Problems writing the config file. Exception: {e}",1)
|
||||
|
|
@ -360,10 +397,13 @@ class broker:
|
|||
if self.get_exchange_name()=="binance":
|
||||
a = self.exchange.fetch_last_prices(pair_list)
|
||||
return {x: a[x]["price"] for x in a.keys()}
|
||||
elif self.get_exchange_name()=="kucoin":
|
||||
a = self.exchange.fetch_tickers(pair_list)
|
||||
if pair_list is None:
|
||||
return {x: a[x]["close"] for x in a.keys()}
|
||||
return {x: a[x]["close"] for x in a.keys() if x in pair_list}
|
||||
else:
|
||||
#a = self.exchange.fetch_tickers(pair_list)
|
||||
a = self.exchange.fetch_tickers()
|
||||
#return {x.upper(): a[x]["close"] for x in a.keys() if x.upper() in pair_list}
|
||||
if pair_list is None:
|
||||
return {x: a[x]["close"] for x in a.keys()}
|
||||
return {x: a[x]["close"] for x in a.keys() if x in pair_list}
|
||||
|
|
@ -384,13 +424,10 @@ class broker:
|
|||
:param no_retries: if True, will not retry if exception occurs
|
||||
:return: closing price of trading pair
|
||||
'''
|
||||
|
||||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
pair = symbol
|
||||
a = self.exchange.fetch_ticker(pair)
|
||||
self.last_price = a["close"]
|
||||
self.last_price = self.exchange.fetch_ticker(symbol)["close"]
|
||||
return self.last_price
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_ticker_price: {e}",1)
|
||||
|
|
@ -441,7 +478,7 @@ class broker:
|
|||
try:
|
||||
return orderbook["bids"][0][0]
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception getting top mid price: {e}",1,symbol)
|
||||
self.logger.log_this(f"Exception getting top bid price: {e}",1,symbol)
|
||||
return self.get_ticker_price(symbol)
|
||||
|
||||
|
||||
|
|
@ -486,56 +523,56 @@ class broker:
|
|||
return []
|
||||
|
||||
|
||||
def fetch_full_orders(self,pairs=None) -> list:
|
||||
'''
|
||||
Returns a list of all orders on the exchange
|
||||
|
||||
:param pairs: list of pairs to get orders for
|
||||
:return: list of orders
|
||||
'''
|
||||
|
||||
if pairs is None:
|
||||
pairs = []
|
||||
try:
|
||||
orders = []
|
||||
if self.get_exchange_name()=="binance":
|
||||
orders = self.get_opened_orders_binance(pairs)
|
||||
else:
|
||||
orders = self.get_opened_orders()
|
||||
return [] if orders is None else orders
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in fetch_full_orders: {e}",2)
|
||||
return []
|
||||
|
||||
|
||||
def fetch_open_orders(self,pairs=None) -> list:
|
||||
'''
|
||||
Returns a list of IDs of all open orders on the exchange
|
||||
Returns a list of all open orders on the exchange
|
||||
|
||||
:param pairs: list of pairs to get opened orders
|
||||
:return: list of IDs of all open orders
|
||||
:return: list of all open orders
|
||||
'''
|
||||
|
||||
if pairs is None:
|
||||
pairs = []
|
||||
try:
|
||||
#id_list = []
|
||||
if self.get_exchange_name()=="binance":
|
||||
return self.get_opened_orders_binance(pairs)
|
||||
return self.get_opened_orders()
|
||||
#else:
|
||||
# orders = self.get_opened_orders()
|
||||
#if orders!=[]:
|
||||
# id_list.extend(x["id"] for x in orders)
|
||||
#return id_list
|
||||
if self.broker_config.get("unified_order_query"):
|
||||
return self.exchange.fetch_open_orders()
|
||||
result = []
|
||||
for pair in pairs:
|
||||
a = self.exchange.fetch_open_orders(pair)
|
||||
result.extend(iter(a))
|
||||
return result
|
||||
elif self.get_exchange_name()=="kucoin":
|
||||
return self.exchange.fetch_open_orders(params={"pageSize": "500"})
|
||||
else:
|
||||
return self.exchange.fetch_open_orders()
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in fetch_open_orders: {e}",2)
|
||||
return []
|
||||
|
||||
|
||||
def get_opened_orders(self,no_retries=False): #It should return a list of all opened orders
|
||||
|
||||
|
||||
def fetch_closed_orders(self,pairs=None) -> list:
|
||||
'''
|
||||
Returns a list of all the orders on the exchange
|
||||
Returns a list of all closed orders on the exchange
|
||||
|
||||
:param pairs: list of pairs to get closed orders for
|
||||
:return: list of all closed orders
|
||||
'''
|
||||
|
||||
if pairs is None:
|
||||
pairs = []
|
||||
try:
|
||||
if self.get_exchange_name()=="binance":
|
||||
return self.get_closed_orders_binance(pairs)
|
||||
return self.get_closed_orders()
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in fetch_closed_orders: {e}",2)
|
||||
return []
|
||||
|
||||
|
||||
def get_closed_orders(self,pair=None,no_retries=False): #It should return a list of all closed orders
|
||||
'''
|
||||
Returns a list of all the closed orders on the exchange
|
||||
|
||||
:param pairs: list of pairs
|
||||
:return: list of all the closed orders on the exchange
|
||||
|
|
@ -544,9 +581,9 @@ class broker:
|
|||
retries = self.retries
|
||||
while retries>0:
|
||||
try:
|
||||
return self.exchange.fetch_open_orders()
|
||||
return self.exchange.fetch_closed_orders(pair)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_opened_orders: {e}",1)
|
||||
self.logger.log_this(f"Exception in get_closed_orders: {e}",1)
|
||||
if no_retries:
|
||||
break
|
||||
time.sleep(self.wait_time)
|
||||
|
|
@ -554,24 +591,24 @@ class broker:
|
|||
return []
|
||||
|
||||
|
||||
def get_opened_orders_binance(self,pairs):
|
||||
def get_closed_orders_binance(self,pairs):
|
||||
'''
|
||||
Returns a list of all the open orders on the exchange
|
||||
Returns a list of all the closed orders on the exchange
|
||||
|
||||
:param pairs: list of pairs
|
||||
:return: list of all the open orders on the exchange
|
||||
:return: list of all the closed orders on the exchange
|
||||
'''
|
||||
|
||||
try:
|
||||
if "unified_order_query" in self.read_config and self.read_config["unified_order_query"] is True:
|
||||
return self.exchange.fetch_open_orders()
|
||||
if self.broker_config.get("unified_order_query"):
|
||||
return self.exchange.fetch_closed_orders()
|
||||
result = []
|
||||
for pair in pairs:
|
||||
a = self.exchange.fetch_open_orders(pair)
|
||||
a = self.exchange.fetch_closed_orders(pair)
|
||||
result.extend(iter(a))
|
||||
return result
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_opened_orders_binance: {e}",1)
|
||||
self.logger.log_this(f"Exception in get_closed_orders_binance: {e}",1)
|
||||
return []
|
||||
|
||||
|
||||
|
|
@ -585,16 +622,15 @@ class broker:
|
|||
:return: 0 if the order was successfully canceled, 1 if not
|
||||
'''
|
||||
|
||||
pair = symbol
|
||||
tries = self.retries//2
|
||||
while tries>0:
|
||||
try:
|
||||
while self.get_order(id,pair)["status"]=="open":
|
||||
self.exchange.cancel_order(id,symbol=pair)
|
||||
while self.get_order(id,symbol)["status"]=="open":
|
||||
self.exchange.cancel_order(id,symbol)
|
||||
time.sleep(self.wait_time)
|
||||
return 0
|
||||
except Exception as e:
|
||||
if self.get_order(id,pair)["status"]=="canceled":
|
||||
if self.get_order(id,symbol)["status"]=="canceled":
|
||||
return 0
|
||||
self.logger.log_this(f"Exception in cancel_order: id {id} - exception: {e}",1)
|
||||
if no_retries:
|
||||
|
|
@ -628,7 +664,7 @@ class broker:
|
|||
return amount
|
||||
|
||||
|
||||
def new_simulated_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False):
|
||||
def new_simulated_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False,log=""):
|
||||
'''
|
||||
TODO: Emulating Market Orders With Limit Orders
|
||||
|
||||
|
|
@ -636,12 +672,12 @@ class broker:
|
|||
|
||||
WARNING this method can be risky due to high volatility, use it at your own risk and only use it when you know really well what you're doing!
|
||||
|
||||
Most of the time a market sell can be emulated with a limit sell at a very low price – the exchange will automatically make it a taker order for market price
|
||||
Most of the time a market sell can be emulated with a limit sell at a very low price; the exchange will automatically make it a taker order for market price
|
||||
(the price that is currently in your best interest from the ones that are available in the order book). When the exchange detects that you're selling for a very low price
|
||||
it will automatically offer you the best buyer price available from the order book. That is effectively the same as placing a market sell order. Thus market orders can be
|
||||
emulated with limit orders (where missing).
|
||||
|
||||
The opposite is also true – a market buy can be emulated with a limit buy for a very high price. Most exchanges will again close your order for best available price,
|
||||
The opposite is also true: a market buy can be emulated with a limit buy at a very high price. Most exchanges will again close your order at the best available price,
|
||||
that is, the market price.
|
||||
|
||||
However, you should never rely on that entirely, ALWAYS test it with a small amount first! You can try that in their web interface first to verify the logic. You can sell
|
||||
|
|
@ -655,26 +691,29 @@ class broker:
|
|||
'''
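# Illustrative sketch of the emulation described above (kept as comments; the price
# factors are assumptions, not part of this change):
#   market sell ~ limit sell priced far below the best bid:
#       price = order_book["bids"][0][0] * 0.5
#       self.exchange.create_order(symbol, "limit", "sell", base_amount, price)
#   market buy  ~ limit buy priced far above the best ask:
#       price = order_book["asks"][0][0] * 2.0
#       self.exchange.create_order(symbol, "limit", "buy", base_amount, price)
# The code below derives the price from find_minimum_viable_price() instead of a fixed factor.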
|
||||
|
||||
retries = self.retries//2
|
||||
pair = symbol
|
||||
while retries>0:
|
||||
try:
|
||||
if self.get_exchange_name()=="gateio" and side=="buy" and not amount_in_base:
|
||||
new_order = self.exchange.create_market_buy_order_with_cost(pair, size)
|
||||
new_order = self.exchange.create_market_buy_order_with_cost(symbol, size)
|
||||
if self.log_orders:
|
||||
self.logger.log_order(f"New simulated market order: Symbol: {symbol} - Side: {side} - Size: {size} - ID: {new_order['id']} - Origin: {log}")
|
||||
else:
|
||||
order_book = self.get_order_book(symbol)
|
||||
if order_book=={}:
|
||||
self.logger.log_this(f"new_simulated_market_order. Order book returned an empty dictionary",1,symbol)
|
||||
return self.empty_order
|
||||
if amount_in_base or side!="buy":
|
||||
base_amount = self.amount_to_precision(pair,size)
|
||||
base_amount = self.amount_to_precision(symbol,size)
|
||||
else:
|
||||
avg_price = self.average_price_depth(order_book,size,"sell")
|
||||
base_amount = size/avg_price if avg_price is not None else size/self.get_ticker_price(symbol)
|
||||
price = self.find_minimum_viable_price(order_book,base_amount,side)
|
||||
#Maybe check for slippage here instead of within the trader itself? idk
|
||||
new_order = self.exchange.create_order(pair,"limit",side,base_amount,price)
|
||||
new_order = self.exchange.create_order(symbol,"limit",side,base_amount,price)
|
||||
if self.log_orders:
|
||||
self.logger.log_order(f"New simulated market order: Symbol: {symbol} - Side: {side} - Size: {size} - Price: {price} - ID: {new_order['id']} - Origin: {log}")
|
||||
time.sleep(self.wait_time)
|
||||
return self.get_order(new_order["id"],pair)
|
||||
return self.get_order(new_order["id"],symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"new_simulated_market_order exception: {e}",1,symbol)
|
||||
if no_retries:
|
||||
|
|
@ -719,7 +758,7 @@ class broker:
|
|||
return None
|
||||
|
||||
|
||||
def new_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False): #It should send a new market order to the exchange
|
||||
def new_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False, log=""): #It should send a new market order to the exchange
|
||||
'''
|
||||
Sends a new market order to the exchange.
|
||||
|
||||
|
|
@ -730,25 +769,27 @@ class broker:
|
|||
:param no_retries: If True, the function will not try to fetch the order again if it fails
|
||||
'''
|
||||
|
||||
if self.read_config["simulate_market_orders"]:
|
||||
if self.broker_config["simulate_market_orders"]:
|
||||
return self.new_simulated_market_order(symbol,size,side,amount_in_base=amount_in_base)
|
||||
retries = self.retries
|
||||
pair = symbol
|
||||
while retries>0:
|
||||
try:
|
||||
if side=="buy":
|
||||
to_buy = float(size)
|
||||
if not amount_in_base:
|
||||
to_buy = float(size)/self.get_top_ask_price(pair)
|
||||
amount = self.amount_to_precision(pair,to_buy)
|
||||
to_buy = float(size)/self.get_top_ask_price(symbol)
|
||||
amount = self.amount_to_precision(symbol,to_buy)
|
||||
else:
|
||||
amount = self.amount_to_precision(pair,size) #Market sell orders are always nominated in base currency
|
||||
amount = self.amount_to_precision(symbol,size) #Market sell orders are always nominated in base currency
|
||||
|
||||
order_to_send = self.exchange.create_order(pair,"market",side,amount)
|
||||
order_to_send = self.exchange.create_order(symbol,"market",side,amount)
|
||||
if self.log_orders:
|
||||
self.logger.log_order(f"New market order: Symbol: {symbol} - Side: {side} - Size: {size} - ID: {order_to_send['id']} - Origin: {log}")
|
||||
time.sleep(self.wait_time)
|
||||
return self.get_order(order_to_send["id"],pair)
|
||||
|
||||
return self.get_order(order_to_send["id"],symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in new_market_order: {e}",1,pair)
|
||||
self.logger.log_this(f"Exception in new_market_order: {e} - Side: {side} - Size: {size}",1,symbol)
|
||||
if no_retries:
|
||||
break
|
||||
time.sleep(self.wait_time)
|
||||
|
|
@ -796,7 +837,41 @@ class broker:
|
|||
return "the lowest price limit for sell orders is" in str(error_object).lower()
|
||||
|
||||
|
||||
def new_limit_order(self,symbol,size,side,price,no_retries=False):
|
||||
# def new_limit_orders(self, orders: list) -> list:
|
||||
# sent_orders = []
|
||||
# #Send the orders
|
||||
# tries = self.retries
|
||||
# while tries>=0:
|
||||
# try:
|
||||
# sent_orders = self.exchange.create_orders(orders)
|
||||
# except Exception as e:
|
||||
# self.logger.log_this(f"Exception while sending safety orders: {e}",1)
|
||||
# tries-=1
|
||||
# time.sleep(self.wait_time)
|
||||
# if tries==0:
|
||||
# return []
|
||||
|
||||
# #Retrieve the orders from the exchange by id to confirm that they were sent
|
||||
# #Specially for OKX, since the orders that create_orders return are empty (only id is present)
|
||||
# returned_orders = []
|
||||
# for order in sent_orders:
|
||||
# tries = self.retries
|
||||
# while tries>=0:
|
||||
# try:
|
||||
# returned_orders.append(self.get_order(order["id"],order["symbol"]))
|
||||
# time.sleep(self.wait_time)
|
||||
# except Exception as e:
|
||||
# self.logger.log_this(f"Exception while retrieving safety orders: {e}",1)
|
||||
# tries-=1
|
||||
# if tries==0:
|
||||
# if self.get_exchange_name()=="okex":
|
||||
# return returned_orders
|
||||
# returned_orders.append(order) #In the case of the other exchanges, we just assume that the order was sent and append it.
|
||||
# time.sleep(self.wait_time)
|
||||
# return returned_orders
|
||||
|
||||
|
||||
def new_limit_order(self,symbol,size,side,price,no_retries=False,log=""):
|
||||
'''
|
||||
Sends a new limit order.
|
||||
|
||||
|
|
@ -808,20 +883,15 @@ class broker:
|
|||
'''
|
||||
|
||||
tries = self.retries
|
||||
pair = symbol
|
||||
while tries>=0:
|
||||
try:
|
||||
order_to_send = self.exchange.create_order(pair,"limit",side,self.amount_to_precision(pair,size),price)
|
||||
order_to_send = self.exchange.create_order(symbol,"limit",side,self.amount_to_precision(symbol,size),price)
|
||||
time.sleep(self.wait_time)
|
||||
return self.get_order(order_to_send["id"],pair)
|
||||
#if order_to_send["amount"] is not None: # Because Kucoin etc etc
|
||||
# return self.get_order(order_to_send["id"],pair) #
|
||||
#self.logger.log_this(f"Error sending order: Null order returned",2,pair) #
|
||||
#self.cancel_order(order_to_send["id"],symbol,no_retries=True) #
|
||||
#retries-=1
|
||||
|
||||
if self.log_orders:
|
||||
self.logger.log_order(f"New limit order: Symbol: {symbol} - Side: {side} - Size: {size} - Price: {price} - ID: {order_to_send['id']} - Notes: {log}")
|
||||
return self.get_order(order_to_send["id"],symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in new_limit_order - Side: {side} - Size: {size} - {self.amount_to_precision(pair,size)} - Exception: {e}",1,symbol)
|
||||
self.logger.log_this(f"Exception in new_limit_order - Side: {side} - Size: {size} - {self.amount_to_precision(symbol,size)} - Exception: {e}",1,symbol)
|
||||
if self.not_enough_balance_error(e):
|
||||
if tries<=self.retries//2: #Halves the amount of retries if there is a balance error.
|
||||
return 1
|
||||
|
|
@ -852,10 +922,9 @@ class broker:
|
|||
if id=="":
|
||||
return self.empty_order
|
||||
tries = self.retries
|
||||
pair = symbol
|
||||
while tries>0:
|
||||
try:
|
||||
return self.exchange.fetch_order(id,symbol=pair)
|
||||
return self.exchange.fetch_order(id,symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_order: {e}",1,symbol)
|
||||
if no_retries:
|
||||
|
|
@ -873,10 +942,9 @@ class broker:
|
|||
:return: The market information.
|
||||
'''
|
||||
tries = self.retries
|
||||
pair = symbol
|
||||
while tries>0:
|
||||
try:
|
||||
return self.exchange.market(pair)
|
||||
return self.exchange.market(symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in fetch_market: {e}",1,symbol)
|
||||
if no_retries:
|
||||
|
|
@ -894,10 +962,9 @@ class broker:
|
|||
:return: The ticker information.
|
||||
'''
|
||||
tries = self.retries
|
||||
pair = symbol
|
||||
while tries>0:
|
||||
try:
|
||||
return self.exchange.fetch_ticker(pair)
|
||||
return self.exchange.fetch_ticker(symbol)
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_ticker: {e}")
|
||||
if no_retries:
|
||||
|
|
@ -918,14 +985,16 @@ class broker:
|
|||
market = self.fetch_market(pair)
|
||||
if market is None:
|
||||
return None
|
||||
if self.get_exchange_name() in ["okex","kucoin"]:
|
||||
if self.get_exchange_name() in ["okex","bybit"]:
|
||||
return float(market["limits"]["amount"]["min"])
|
||||
elif self.get_exchange_name() in ["kucoin"]:
|
||||
return max(float(market["limits"]["amount"]["min"]),(float(market["limits"]["cost"]["min"])+.25)/self.get_ticker_price(pair))
|
||||
elif self.get_exchange_name() in ["gateio"]:
|
||||
return (float(market["limits"]["cost"]["min"])+1)/self.get_ticker_price(pair)
|
||||
return (float(market["limits"]["cost"]["min"])+.1)/self.get_ticker_price(pair)
|
||||
elif self.get_exchange_name()=="binance":
|
||||
for line in market["info"]["filters"]:
|
||||
if line["filterType"] == "NOTIONAL":
|
||||
return (float(line["minNotional"])+1)/self.get_ticker_price(pair)
|
||||
return (float(line["minNotional"])+.5)/self.get_ticker_price(pair)
|
||||
return None
|
||||
|
||||
|
||||
|
|
@ -945,7 +1014,7 @@ class broker:
|
|||
if line["filterType"] == "NOTIONAL":
|
||||
#return self.broker.amount_to_precision(pair,(float(line["minNotional"])))
|
||||
return float(line["minNotional"])
|
||||
elif self.get_exchange_name()=="gateio":
|
||||
elif self.get_exchange_name() in ["gateio", "bybit"]:
|
||||
#return self.cost_to_precision(pair,float(market["info"]["min_base_amount"])*self.broker.get_mid_price(pair))
|
||||
return float(market["limits"]["cost"]["min"])
|
||||
elif self.get_exchange_name() in ["okex","kucoin"]:
|
||||
|
|
@ -959,8 +1028,8 @@ class broker:
|
|||
|
||||
:param pair: pair
|
||||
:return: step size
|
||||
|
||||
'''
|
||||
|
||||
market = self.fetch_market(pair)
|
||||
if market is None:
|
||||
return None
|
||||
|
|
@ -971,39 +1040,51 @@ class broker:
|
|||
return float(filter["stepSize"])
|
||||
elif self.get_exchange_name()=="kucoin":
|
||||
return float(market["info"]["baseIncrement"])
|
||||
elif self.get_exchange_name() in ["gateio","okex"]:
|
||||
elif self.get_exchange_name() in ["gateio", "okex", "bybit"]:
|
||||
return float(market["precision"]["amount"])
|
||||
except Exception as e:
|
||||
self.logger.log_this(f"Exception in get_step_size: {e}",1,pair)
|
||||
return None
|
||||
|
||||
|
||||
class logger:
|
||||
class Logger:
|
||||
def __init__(self,broker_config):
|
||||
self.broker_config = broker_config
|
||||
self.exchange_name = self.broker_config["exchange"]
|
||||
self.tg_credentials = credentials.get_credentials("telegram")
|
||||
self.log_list_max_length = 10
|
||||
self.log_list = self.preload_logs()
|
||||
self.log_list_max_length = 20 # log cache
|
||||
self.log_list = collections.deque(maxlen=self.log_list_max_length)
|
||||
self.preload_logs()
|
||||
|
||||
|
||||
def preload_logs(self):
|
||||
try:
|
||||
with open(f"logs/{self.exchange_name}.log","r") as f:
|
||||
self.log_list = f.readlines()
|
||||
return self.log_list[-self.log_list_max_length:]
|
||||
for line in f:
|
||||
self.log_list.append(line.rstrip("\n"))
|
||||
return 0
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return []
|
||||
return 1
|
||||
|
||||
|
||||
def refresh_logs(self):
|
||||
try:
|
||||
self.log_list.clear()
|
||||
self.preload_logs()
|
||||
return 0
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return 1
|
||||
|
||||
|
||||
def set_log_list_max_length(self, amount):
|
||||
self.log_list_max_length = amount
|
||||
return self.log_list_max_length
|
||||
|
||||
|
||||
def get_log_list(self):
|
||||
return self.log_list
|
||||
return list(self.log_list)
|
||||
|
||||
|
||||
def set_telegram_notifications(self, toggle):
|
||||
|
|
@ -1019,20 +1100,27 @@ class logger:
|
|||
'''
|
||||
Sends a Telegram message
|
||||
'''
|
||||
|
||||
tg_credentials = credentials.get_credentials("telegram")
|
||||
send_text = f"https://api.telegram.org/bot{tg_credentials['token']}/sendMessage?chat_id={tg_credentials['chatid']}&parse_mode=Markdown&text={message}"
|
||||
output = None
|
||||
if self.broker_config["telegram"] or ignore_config:
|
||||
output = requests.get(send_text,timeout=5).json() #5 seconds timeout. This could also be a tunable.
|
||||
if not output["ok"]:
|
||||
self.log_this(f"Error in send_tg_message: {output}")
|
||||
return 1
|
||||
return 0
|
||||
try:
|
||||
tg_credentials = credentials.get_credentials("telegram")
|
||||
send_text = f"https://api.telegram.org/bot{tg_credentials['token']}/sendMessage?chat_id={tg_credentials['chatid']}&parse_mode=Markdown&text={message}"
|
||||
output = None
|
||||
if self.broker_config["telegram"] or ignore_config:
|
||||
output = requests_get(send_text,timeout=5).json() #5 seconds timeout. This could also be a tunable.
|
||||
if not output["ok"]:
|
||||
self.log_this(f"Error in send_tg_message: {output}")
|
||||
return 1
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.log_this(f"Error in send_tg_message: {e}",1)
|
||||
return 1
|
||||
|
||||
def log_order(self,message):
|
||||
with open(f"logs/orders.log","a") as log_file:
|
||||
log_file.write(time.strftime(f"[%Y/%m/%d %H:%M:%S] | {message}\n"))
|
||||
|
||||
def log_this(self,message,level=2,pair=None):
|
||||
'''
|
||||
Level -1: Force Telegram only
|
||||
Level 0: Screen, log file and Telegram
|
||||
Level 1: Screen and log file
|
||||
Level 2: Screen only
|
||||
|
|
@ -1042,27 +1130,24 @@ class logger:
|
|||
text = time.strftime(f"[%Y/%m/%d %H:%M:%S] | {pair_data}{message}")
|
||||
|
||||
print(text)
|
||||
|
||||
if level==-1:
|
||||
self.send_tg_message(message,ignore_config=True)
|
||||
return 0
|
||||
if level<2:
|
||||
try:
|
||||
#Write to log file
|
||||
with open(f"logs/{self.exchange_name}.log","a") as log_file:
|
||||
log_file.write(text+"\n")
|
||||
log_file.close()
|
||||
|
||||
#Append to log list
|
||||
self.log_list.append(text)
|
||||
|
||||
#Trim log list
|
||||
self.log_list = self.log_list[-self.log_list_max_length:]
|
||||
|
||||
except Exception as e:
|
||||
print("Can't write log file")
|
||||
print(e)
|
||||
|
||||
print(e)
|
||||
if level<1:
|
||||
self.send_tg_message(f"{self.broker_config['exchange'].capitalize()} | {pair_data}{message}",ignore_config=level==-1)
|
||||
|
||||
return 0
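# Hypothetical calls illustrating the levels documented above (messages invented):
#   self.log_this("deal closed", 0, "BTC/USDT")    # screen + log file + Telegram
#   self.log_this("order retried", 1, "BTC/USDT")  # screen + log file
#   self.log_this("heartbeat", 2)                  # screen only
#   self.log_this("urgent notice", -1)             # forced Telegram message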
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -24,7 +24,6 @@ cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours'
|
|||
last_60_days_rows = cursor.fetchall()
|
||||
|
||||
#Last 30 days query
|
||||
#cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
|
|
@ -47,6 +46,15 @@ cursor.execute("""SELECT strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') A
|
|||
ORDER BY year_month_utc3;""")
|
||||
last_n_months_rows = cursor.fetchall()
|
||||
|
||||
#Last n years query
|
||||
cursor.execute("""SELECT strftime('%Y', timestamp, 'unixepoch', '-3 hours') AS year_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
WHERE strftime('%s', 'now') - timestamp <= 60 * 30 * 24 * 60 * 60 -- roughly 60 months (1800 days) in seconds
|
||||
GROUP BY year_utc3
|
||||
ORDER BY year_utc3;""")
|
||||
last_n_years_rows = cursor.fetchall()
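# Shape of these rows (values invented): [("2023", 1234.56), ("2024", 2345.67)],
# i.e. yearly profit totals over roughly the last five years.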
|
||||
|
||||
#Yearly totals
|
||||
cursor.execute("""SELECT strftime('%Y', timestamp, 'unixepoch', '-3 hours') AS year_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
|
|
@ -66,7 +74,13 @@ print("Last 18 months:")
|
|||
print("-"*line_width)
|
||||
for row in last_n_months_rows[1:]:
|
||||
print(f"{row[0]}: {round(row[1],2)}")
|
||||
print("="*line_width)
|
||||
print("Last 5 years:")
|
||||
print("-"*line_width)
|
||||
for row in last_n_years_rows:
|
||||
print(f"{row[0]}: {round(row[1],2)}")
|
||||
print("-"*line_width)
|
||||
|
||||
print(f"Last 30 days average: {round(last_30_days[0][1]/30,2)}")
|
||||
print(f"Last 7 days average: {round(last_7_days[0][1]/7,2)}")
|
||||
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
|
|
@ -125,6 +139,9 @@ for row in by_exchange:
|
|||
if row[1]=="This Month":
|
||||
okex_amount = row[2]
|
||||
|
||||
#Close db
|
||||
cursor.close()
|
||||
|
||||
total_amount = binance_amount+gateio_amount+kucoin_amount+okex_amount
|
||||
|
||||
print(f"Binance: {round(binance_amount,2)} USDT ({round(binance_amount/total_amount*100,2)}%)")
|
||||
|
|
|
|||
|
|
@ -0,0 +1,467 @@
|
|||
from time import strftime
|
||||
from json import dumps, load
|
||||
|
||||
class StatusHandler:
|
||||
'''
|
||||
Handles the per-pair status of the trader (parameter validation in the setters is currently commented out)
|
||||
'''
|
||||
|
||||
def __init__(self, broker, base, quote, status_dict = None):
|
||||
self.broker = broker
|
||||
self.default_status_dictionary = {
|
||||
"pair": f"{base}/{quote}",
|
||||
"take_profit_order": broker.get_empty_order(),
|
||||
"take_profit_price": 0.0,
|
||||
"safety_orders": [],
|
||||
"safety_orders_filled": 0,
|
||||
"next_so_price": 0.0,
|
||||
"order_size": 0.0,
|
||||
"partial_profit": 0.0,
|
||||
"price": 0.0,
|
||||
"is_boosted": False,
|
||||
"is_short": False,
|
||||
"is_paused": False,
|
||||
"quote_spent": 0.0,
|
||||
"base_bought": 0.0,
|
||||
"so_amount": 0,
|
||||
"no_of_safety_orders": 0,
|
||||
"safety_price_table": [],
|
||||
"deal_uptime": 0.0,
|
||||
"total_uptime": 0.0,
|
||||
"fees_paid_in_base": 0.0,
|
||||
"fees_paid_in_quote": 0.0,
|
||||
"start_price": 0.0,
|
||||
"tp_mode": 0,
|
||||
"profit_table": [],
|
||||
"start_time": 0,
|
||||
"deal_start_time": 0,
|
||||
"stop_when_profit": False,
|
||||
"autoswitch": False,
|
||||
"liquidate_after_switch": False,
|
||||
"old_long": {},
|
||||
"pause_reason": "",
|
||||
"status_string": "",
|
||||
"deal_order_history": []
|
||||
}
|
||||
self.status_file_path = f"status/{base}{quote}.status"
|
||||
|
||||
self.status_dictionary = {k: v for k, v in self.default_status_dictionary.items()}
|
||||
if status_dict:
|
||||
self.status_dictionary.update(status_dict)
|
||||
self.save_to_file()
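# Illustrative construction (broker object and values are assumptions): keys passed in
# status_dict override the defaults above, everything else keeps its default, and when a
# status_dict is given the merged result is saved to status/<BASE><QUOTE>.status.
#   status = StatusHandler(broker, "BTC", "USDT", status_dict={"is_paused": True})
#   status.get_is_paused()  -> True
#   status.get_pair()       -> "BTC/USDT"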
|
||||
|
||||
def get_pair(self):
|
||||
return self.status_dictionary["pair"]
|
||||
|
||||
def get_take_profit_order(self):
|
||||
return self.status_dictionary["take_profit_order"]
|
||||
|
||||
def get_take_profit_price(self):
|
||||
return self.status_dictionary["take_profit_price"]
|
||||
|
||||
def get_safety_orders(self):
|
||||
"""
|
||||
Returns the list of open safety orders
|
||||
"""
|
||||
return self.status_dictionary["safety_orders"]
|
||||
|
||||
def get_safety_orders_filled(self):
|
||||
return self.status_dictionary["safety_orders_filled"]
|
||||
|
||||
def get_next_so_price(self):
|
||||
return self.status_dictionary["next_so_price"]
|
||||
|
||||
def get_order_size(self):
|
||||
return self.status_dictionary["order_size"]
|
||||
|
||||
def get_partial_profit(self):
|
||||
return self.status_dictionary["partial_profit"]
|
||||
|
||||
def get_price(self):
|
||||
return self.status_dictionary["price"]
|
||||
|
||||
def get_is_boosted(self):
|
||||
return self.status_dictionary["is_boosted"]
|
||||
|
||||
def get_is_short(self):
|
||||
return self.status_dictionary["is_short"]
|
||||
|
||||
def get_is_paused(self):
|
||||
return self.status_dictionary["is_paused"]
|
||||
|
||||
def get_quote_spent(self):
|
||||
return self.status_dictionary["quote_spent"]
|
||||
|
||||
def get_base_bought(self):
|
||||
return self.status_dictionary["base_bought"]
|
||||
|
||||
def get_so_amount(self):
|
||||
return self.status_dictionary["so_amount"]
|
||||
|
||||
def get_no_of_safety_orders(self):
|
||||
return self.status_dictionary["no_of_safety_orders"]
|
||||
|
||||
def get_safety_price_table(self):
|
||||
return self.status_dictionary["safety_price_table"]
|
||||
|
||||
def get_deal_uptime(self):
|
||||
return self.status_dictionary["deal_uptime"]
|
||||
|
||||
def get_total_uptime(self):
|
||||
return self.status_dictionary["total_uptime"]
|
||||
|
||||
def get_fees_paid_in_base(self):
|
||||
return self.status_dictionary["fees_paid_in_base"]
|
||||
|
||||
def get_fees_paid_in_quote(self):
|
||||
return self.status_dictionary["fees_paid_in_quote"]
|
||||
|
||||
def get_start_price(self):
|
||||
return self.status_dictionary["start_price"]
|
||||
|
||||
def get_tp_mode(self):
|
||||
return self.status_dictionary["tp_mode"]
|
||||
|
||||
def get_profit_table(self):
|
||||
return self.status_dictionary["profit_table"]
|
||||
|
||||
def get_start_time(self):
|
||||
return self.status_dictionary["start_time"]
|
||||
|
||||
def get_deal_start_time(self):
|
||||
return self.status_dictionary["deal_start_time"]
|
||||
|
||||
def get_stop_when_profit(self):
|
||||
return self.status_dictionary["stop_when_profit"]
|
||||
|
||||
def get_autoswitch(self):
|
||||
return self.status_dictionary["autoswitch"]
|
||||
|
||||
def get_liquidate_after_switch(self):
|
||||
return self.status_dictionary["liquidate_after_switch"]
|
||||
|
||||
def get_old_long(self):
|
||||
return self.status_dictionary["old_long"]
|
||||
|
||||
def get_pause_reason(self):
|
||||
return self.status_dictionary["pause_reason"]
|
||||
|
||||
def get_status_string(self):
|
||||
return self.status_dictionary["status_string"]
|
||||
|
||||
def get_deal_order_history(self):
|
||||
return self.status_dictionary["deal_order_history"]
|
||||
|
||||
def get_status_file_path(self):
|
||||
return self.status_file_path
|
||||
|
||||
def set_pair(self, trading_pair):
|
||||
self.status_dictionary["pair"] = trading_pair
|
||||
return 0
|
||||
|
||||
def set_status_file_path(self, new_file_path: str):
|
||||
# if not isinstance(new_file_path, str):
|
||||
# self.broker.logger.log_this(f"File path provided is not a string",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_file_path = new_file_path
|
||||
return 0
|
||||
|
||||
def set_tp_order_id(self, order_id: str):
|
||||
# if not isinstance(order_id, str):
|
||||
# self.broker.logger.log_this(f"Order id provided is not a string",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["tp_order_id"] = order_id
|
||||
return 0
|
||||
|
||||
def set_take_profit_order(self, order):
|
||||
#Validate order
|
||||
self.status_dictionary["take_profit_order"] = order
|
||||
return 0
|
||||
|
||||
def set_take_profit_price(self, price: float):
|
||||
# if not isinstance(price, float):
|
||||
# self.broker.logger.log_this(f"Price provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["take_profit_price"] = price
|
||||
return 0
|
||||
|
||||
def set_so_order_id(self, order_id: str):
|
||||
# if not isinstance(order_id, str):
|
||||
# self.broker.logger.log_this(f"Order id provided is not a string",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["so_order_id"] = order_id
|
||||
return 0
|
||||
|
||||
def set_safety_orders(self, orders: list):
|
||||
"""
|
||||
Replaces the whole safety orders list
|
||||
"""
|
||||
self.status_dictionary["safety_orders"] = orders
|
||||
return 0
|
||||
|
||||
def set_safety_orders_filled(self, amount: int):
|
||||
self.status_dictionary["safety_orders_filled"] = amount
|
||||
return 0
|
||||
|
||||
def set_next_so_price(self, price: float):
|
||||
# if not isinstance(price, float):
|
||||
# self.broker.logger.log_this(f"Price provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["next_so_price"] = price
|
||||
return 0
|
||||
|
||||
def set_order_size(self, size: float):
|
||||
# if not isinstance(size, float):
|
||||
# self.broker.logger.log_this(f"Size provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["order_size"] = size
|
||||
return 0
|
||||
|
||||
def set_partial_profit(self, profit: float):
|
||||
# if not isinstance(profit, float):
|
||||
# self.broker.logger.log_this(f"Profit provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["partial_profit"] = profit
|
||||
return 0
|
||||
|
||||
def set_price(self, price: float):
|
||||
# if not isinstance(price, float):
|
||||
# self.broker.logger.log_this(f"Price provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["price"] = price
|
||||
return 0
|
||||
|
||||
def set_is_boosted(self, is_boosted: bool):
|
||||
# if not isinstance(is_boosted, bool):
|
||||
# self.broker.logger.log_this(f"is_boosted provided is not a bool",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["is_boosted"] = is_boosted
|
||||
return 0
|
||||
|
||||
def set_is_short(self, is_short: bool):
|
||||
# if not isinstance(is_short, bool):
|
||||
# self.broker.logger.log_this(f"is_short provided is not a bool",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["is_short"] = is_short
|
||||
return 0
|
||||
|
||||
def set_is_paused(self, is_paused: bool):
|
||||
# if not isinstance(is_paused, bool):
|
||||
# self.broker.logger.log_this(f"is_paused provided is not a bool",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["is_paused"] = is_paused
|
||||
return 0
|
||||
|
||||
def set_quote_spent(self, quote_spent: float):
|
||||
# if not isinstance(quote_spent, float):
|
||||
# self.broker.logger.log_this(f"quote_spent provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["quote_spent"] = quote_spent
|
||||
return 0
|
||||
|
||||
def set_base_bought(self, base_bought: float):
|
||||
# if not isinstance(base_bought, float):
|
||||
# self.broker.logger.log_this(f"base_bought provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["base_bought"] = base_bought
|
||||
return 0
|
||||
|
||||
def set_so_amount(self, so_amount: int):
|
||||
# if not isinstance(so_amount, int):
|
||||
# self.broker.logger.log_this(f"so_amount provided is not an integer",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["so_amount"] = so_amount
|
||||
return 0
|
||||
|
||||
def set_no_of_safety_orders(self, number: int):
|
||||
# if not isinstance(number, int):
|
||||
# self.broker.logger.log_this(f"number provided is not an integer",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["no_of_safety_orders"] = number
|
||||
return 0
|
||||
|
||||
def set_safety_price_table(self, table: list):
|
||||
# if not isinstance(table, list):
|
||||
# self.broker.logger.log_this(f"table provided is not a list",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["safety_price_table"] = table
|
||||
return 0
|
||||
|
||||
def set_deal_uptime(self, uptime):
|
||||
# if not isinstance(uptime, (int, float)):
|
||||
# self.broker.logger.log_this(f"uptime provided is not a number",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["deal_uptime"] = uptime
|
||||
return 0
|
||||
|
||||
def set_total_uptime(self, uptime):
|
||||
# if not isinstance(uptime, (int, float)):
|
||||
# self.broker.logger.log_this(f"uptime provided is not a number",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["total_uptime"] = uptime
|
||||
return 0
|
||||
|
||||
def set_fees_paid_in_base(self, fees: float):
|
||||
# if not isinstance(fees, float):
|
||||
# self.broker.logger.log_this(f"value provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["fees_paid_in_base"] = fees
|
||||
return 0
|
||||
|
||||
def set_fees_paid_in_quote(self, fees: float):
|
||||
# if not isinstance(fees, float):
|
||||
# self.broker.logger.log_this(f"value provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["fees_paid_in_quote"] = fees
|
||||
return 0
|
||||
|
||||
def set_start_price(self, price: float):
|
||||
# if not isinstance(price, float):
|
||||
# self.broker.logger.log_this(f"value provided is not a float",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["start_price"] = price
|
||||
return 0
|
||||
|
||||
def set_tp_mode(self, mode: int):
|
||||
# if not isinstance(mode, int):
|
||||
# self.broker.logger.log_this(f"value provided is not an integer",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["tp_mode"] = mode
|
||||
return 0
|
||||
|
||||
def set_profit_table(self, table: list):
|
||||
# if not isinstance(table, list):
|
||||
# self.broker.logger.log_this(f"value provided is not a list",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["profit_table"] = table
|
||||
return 0
|
||||
|
||||
def set_start_time(self, time):
|
||||
# if not isinstance(time, (int, float)):
|
||||
# self.broker.logger.log_this(f"value provided is not a number",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["start_time"] = time
|
||||
return 0
|
||||
|
||||
def set_deal_start_time(self, time):
|
||||
# if not isinstance(time, (int, float)):
|
||||
# self.broker.logger.log_this(f"value provided is not a number",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["deal_start_time"] = time
|
||||
return 0
|
||||
|
||||
def set_stop_when_profit(self, stop: bool):
|
||||
# if not isinstance(stop, bool):
|
||||
# self.broker.logger.log_this(f"value provided is not a boolean",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["stop_when_profit"] = stop
|
||||
return 0
|
||||
|
||||
def set_autoswitch(self, switch: bool):
|
||||
# if not isinstance(switch, bool):
|
||||
# self.broker.logger.log_this(f"value provided is not a boolean",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["autoswitch"] = switch
|
||||
return 0
|
||||
|
||||
def set_liquidate_after_switch(self, switch: bool):
|
||||
# if not isinstance(switch, bool):
|
||||
# self.broker.logger.log_this(f"value provided is not a boolean",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["liquidate_after_switch"] = switch
|
||||
return 0
|
||||
|
||||
def set_old_long(self, old_long: dict):
|
||||
# if not isinstance(old_long, dict):
|
||||
# self.broker.logger.log_this(f"value provided is not a dictionary",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["old_long"] = old_long
|
||||
return 0
|
||||
|
||||
def set_pause_reason(self, reason: str):
|
||||
# if not isinstance(reason, str):
|
||||
# self.broker.logger.log_this(f"value provided is not a string",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["pause_reason"] = reason
|
||||
return 0
|
||||
|
||||
def set_status_string(self, string: str):
|
||||
# if not isinstance(string, str):
|
||||
# self.broker.logger.log_this(f"value provided is not a string",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["status_string"] = string
|
||||
return 0
|
||||
|
||||
def set_deal_order_history(self, deal_history: list):
|
||||
# if not isinstance(deal_history, list):
|
||||
# self.broker.logger.log_this(f"value provided is not a list",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary["deal_order_history"] = deal_history
|
||||
return 0
|
||||
|
||||
def add_safety_order(self, order):
|
||||
"""
|
||||
Appends a newly-created safety order to the internal list
|
||||
"""
|
||||
self.status_dictionary["safety_orders"].append(order)
|
||||
return 0
|
||||
|
||||
def remove_safety_order_by_id(self, order_id: str):
|
||||
"""
|
||||
Removes an order from the list (mostly used when that order is filled or canceled)
|
||||
"""
|
||||
orders = self.get_safety_orders()
|
||||
self.status_dictionary["safety_orders"] = [order for order in orders if order["id"] != order_id]
|
||||
return 0
|
||||
|
||||
def clear_deal_order_history(self):
|
||||
self.status_dictionary["deal_order_history"] = []
|
||||
return 0
|
||||
|
||||
def update_deal_order_history(self, new_deal: dict, note: str = ""):
|
||||
# if not isinstance(new_deal, dict):
|
||||
# self.broker.logger.log_this(f"value provided is not a dict",1,self.get_pair())
|
||||
order_id = new_deal["id"] if "id" in new_deal else None
|
||||
self.status_dictionary["deal_order_history"].append(f"{note} - {id}")
|
||||
return 0
|
||||
|
||||
def save_to_file(self, file_path = None, is_backup = False):
|
||||
if file_path is None:
|
||||
file_path = self.status_file_path
|
||||
if is_backup:
|
||||
try:
|
||||
with open(strftime(f"{file_path}_%Y-%m-%d_%H:%M:%S.json"), "w") as f:
|
||||
f.write(dumps(self.status_dictionary, indent=4))
|
||||
except Exception as e:
|
||||
self.broker.logger.log_this(f"Error creating status backup file: {e}",1)
|
||||
try:
|
||||
with open(file_path, "w") as f:
|
||||
f.write(dumps(self.status_dictionary, indent=4))
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.broker.logger.log_this(f"Error saving status to file: {file_path}: {e}",1)
|
||||
return 1
|
||||
|
||||
def load_from_file(self, file_path = None):
|
||||
if file_path is None:
|
||||
file_path = self.status_file_path
|
||||
try:
|
||||
with open(file_path, "r") as f:
|
||||
self.status_dictionary = {**self.default_status_dictionary, **load(f)}
|
||||
return 0
|
||||
except Exception as e:
|
||||
self.broker.logger.log_this(f"Error loading status from file: {file_path}: {e}",1)
|
||||
return 1
|
||||
|
||||
def get_status(self):
|
||||
return self.status_dictionary
|
||||
|
||||
def set_status(self, dictionary: dict):
|
||||
'''
|
||||
Validates every key in the dictionary and then sets the status dictionary
|
||||
'''
|
||||
# if not isinstance(dictionary, dict):
|
||||
# self.broker.logger.log_this(f"status_dictionary provided is not a dictionary",1,self.get_pair())
|
||||
# return 1
|
||||
self.status_dictionary = dictionary
|
||||
return 0
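# The setters above, together with save_to_file and load_from_file, form the
# trader's persistence layer. A minimal usage sketch follows; "TraderStatus"
# and its constructor arguments are stand-ins, since only the methods are
# visible in this diff.
status = TraderStatus(broker, pair="BTC/USDT")        # hypothetical constructor
status.set_order_size(15.0)
status.set_take_profit_price(65000.0)
status.add_safety_order({"id": "abc123", "price": 60000.0})
if status.save_to_file() != 0:                         # methods return 0 on success, 1 on error
    print("could not persist status")
status.load_from_file()                                # merges the defaults with what is on disk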
|
||||
todo.txt
|
|
@ -1,25 +1,22 @@
|
|||
Mandatory:
|
||||
=========
|
||||
0. Mobile app.
|
||||
1. Stats webpage.
|
||||
2. Maintain local orderbooks for each trading pair, which enables:
|
||||
0. Stats webpage.
|
||||
1. Maintain local orderbooks for each trading pair, which enables:
|
||||
2a. Smart order pricing: Prioritization of fill speed over instant profit or vice versa
|
||||
3. Consolidate vocabulary (trader, pair and bot; instance & trader)
|
||||
4. Base add for short traders.
|
||||
5. Proper handling of order price too high/low in OKX (rare, it happens when under heavy volatility).
|
||||
6. Optimize database code.
|
||||
7. Things that should be objects (it's not 1994):
|
||||
* Orders.
|
||||
* Sredro.
|
||||
* A lot more.
|
||||
2. Proper handling of order price too high/low in OKX (rare, it happens when under heavy volatility).
|
||||
3. API documentation.
|
||||
4. Implement api key hashing.
|
||||
5. Dockerize.
|
||||
6. Earn should be integrated into the instance, in order to be able to invest the idle funds from the short traders.
|
||||
|
||||
|
||||
Would be nice to have:
|
||||
=====================
|
||||
0. Trader order: alphabetical; by uptime; by safety orders, by percentage_to_completion. (Although this may be more suitable for the web and mobile apps)
|
||||
1. Local implementation of amount_to_precision, cost_to_precision and price_to_precision. (Unless the plan is to continue to use CCXT forever)
|
||||
2. Instead of cancelling and resending the take profit order, you could just edit it (Kucoin only supports editing on high frequency orders)
|
||||
3. Round-robin trading pairs: Instead of a fixed list of trading pairs, after n closed deals the trader is terminated and a new one spawns, picking the trading pair
|
||||
2. Instead of cancelling and resending the take profit order, edit it (Kucoin only supports editing on high frequency orders)
|
||||
3. When autoswitching to long, instead of using a big market order, the last safety order should be a sell order of all the available funds.
|
||||
4. Round-robin trading pairs: Instead of a fixed list of trading pairs, after n closed deals the trader is terminated and a new one spawns, picking the trading pair
|
||||
from a pre-populated list (the trading pairs can be selected by using Yang-Zhang, Parkinson or another volatility indicator)
|
||||
This could be very beneficial, since it limits the long-term commitment to a small list of trading pairs, enabling the instance to react to market trends very
|
||||
rapidly.
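For reference, the Parkinson estimator mentioned above only needs each candle's high and low. A minimal sketch over CCXT-style OHLCV rows ([timestamp, open, high, low, close, volume]) is shown below; the utils module's own parkinson() may scale or annualise differently.

import math

def parkinson(candles):
    # Parkinson (1980): sigma = sqrt( sum(ln(high/low)^2) / (4 * ln(2) * N) )
    ranges = [math.log(c[2] / c[3]) ** 2 for c in candles if c[3] > 0]
    if not ranges:
        return 0.0
    return math.sqrt(sum(ranges) / (4 * math.log(2) * len(ranges)))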
|
||||
|
|
@ -38,4 +35,4 @@ Maybe it's a good idea?:
|
|||
c. Order on screen: BASE/QUOTE | order_id followed | current_price | deal_close_price | total_volume_on_close | pct_to_profit | uptime
|
||||
d. In status bar: Total funds to be released.
|
||||
e. Change main screen: x traders online | y dusters online
|
||||
f. Since they only need to monitor if one order is filled and the data is already locally available, the extra API load will be negligible.
|
||||
f. Since they only need to monitor if one order is filled and the data is already locally available, there will be no extra API load.
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ with open(f"../configs/{sys.argv[1]}.json") as k:
|
|||
pwd = config["password"] if "password" in config else ""
|
||||
exch = sys.argv[1]
|
||||
if exch=="okex":
|
||||
exch="okex5"
|
||||
exch="okx"
|
||||
exchange_class = getattr(ccxt, sys.argv[1])
|
||||
exchange = exchange_class({
|
||||
"apiKey": config["key"],
|
||||
|
|
|
|||
|
|
@ -4,12 +4,19 @@ import json
|
|||
import credentials
|
||||
|
||||
try:
|
||||
earn_api_key = credentials.get_credentials("earn_api_key")["key"]
|
||||
if sys.argv[1]=="--testnet":
|
||||
is_testnet = True
|
||||
string_to_add = "TESTNET "
|
||||
api_key = credentials.get_credentials("testnet_api_key")["key"]
|
||||
base_url = credentials.get_url("testnet") #type: ignore
|
||||
exchanges = {"Binance":"/binance"}
|
||||
elif sys.argv[1]=="--local_testnet":
|
||||
is_testnet = True
|
||||
string_to_add = "LOCAL TESTNET "
|
||||
api_key = credentials.get_credentials("local_testnet_api_key")["key"]
|
||||
base_url = credentials.get_url("local_testnet") #type: ignore
|
||||
exchanges = {"Binance":":5001"}
|
||||
elif sys.argv[1]=="--mainnet":
|
||||
is_testnet = False
|
||||
string_to_add = "MAINNET "
|
||||
|
|
@ -24,6 +31,8 @@ except Exception as e:
|
|||
sys.exit()
|
||||
|
||||
headers = {'X-API-KEY': api_key}
|
||||
|
||||
earn_headers = {'X-API-KEY': earn_api_key}
|
||||
|
||||
command_list = f'''{string_to_add}COMMANDS:
|
||||
|
||||
|
|
@ -33,6 +42,18 @@ INSTANCE
|
|||
7) mod_global_tp_level 8) global_last_call 9) edit_loop_wait_time
|
||||
10) edit_call_wait_time 11) reload_markets 12) fetch_full_log
|
||||
13) paused_traders 14) fetch_log 15) edit_cooldown_multiplier
|
||||
16) get_balance 17) cancel_global_last_call
|
||||
18) mod_default_order_size 19) toggle_log_orders
|
||||
20) refresh_log_cache
|
||||
|
||||
EARN
|
||||
31) toggle_pause 32) get_step_size 33) set_step_size
|
||||
34) get_percentage 35) set_percentage 36) get_time_between_subscriptions
|
||||
37) set_time_between_subscriptions 38) get_time_between_redemptions
|
||||
39) set_time_between_redemptions 40) get_minimum_amount_in_trading_account
|
||||
41) set_minimum_amount_in_trading_account 42) get_last_subscription
|
||||
43) get_last_redemption 44) get_total_balance
|
||||
45) get_global_status 46) subscribe 47) redeem
|
||||
|
||||
TRADERS
|
||||
51) worker_status 52) get_all_worker_status
|
||||
|
|
@ -42,8 +63,10 @@ TRADERS
|
|||
62) mod_tp_level 63) last_call 64) deferred_last_call
|
||||
65) toggle_pause 66) toggle_cleanup 67) toggle_autoswitch
|
||||
68) toggle_check_old_long_price 69) switch_quote_currency
|
||||
70) reload_safety_order 71) view_old_long 72) switch_price
|
||||
73) backtests
|
||||
70) view_old_long 71) switch_price 72) reload_trader_config
|
||||
73) toggle_liquidate_after_switch 74) base_add_calculation
|
||||
75) mod_concurrent_safety_orders 76) force_trader_close
|
||||
77) mod_order_size
|
||||
|
||||
98) Change broker 99) Exit
|
||||
'''
|
||||
|
|
@ -78,12 +101,13 @@ def select_exchange(exchanges):
|
|||
Selects the exchange to use
|
||||
'''
|
||||
|
||||
selection = input("Enter exchange: (Binance, Gate.io, KuCoin, OKX) ").lower()
|
||||
for item in exchanges:
|
||||
if selection in item.lower():
|
||||
return item
|
||||
print("Invalid input")
|
||||
sys.exit()
|
||||
while True:
|
||||
selection = input("Enter exchange: (Binance, Gate.io, KuCoin, OKX) ").lower()
|
||||
for item in exchanges:
|
||||
if selection in item.lower():
|
||||
return item
|
||||
print("Invalid input")
|
||||
|
||||
|
||||
if __name__=="__main__":
|
||||
|
||||
|
|
@ -99,6 +123,10 @@ if __name__=="__main__":
|
|||
#print("Invalid input")
|
||||
#sys.exit()
|
||||
port = exchanges[selection]
|
||||
earn_broker = port[1:]
|
||||
if earn_broker=="okex":
|
||||
earn_broker="okx"
|
||||
|
||||
|
||||
|
||||
print("DCAv2 COMMANDER")
|
||||
|
|
@ -132,6 +160,9 @@ if __name__=="__main__":
|
|||
# break
|
||||
selection = select_exchange(exchanges)
|
||||
port = exchanges[selection]
|
||||
earn_broker = port[1:]
|
||||
if earn_broker=="okex":
|
||||
earn_broker="okx"
|
||||
print(f"New exchange selected: {selection}")
|
||||
|
||||
|
||||
|
|
@ -206,6 +237,7 @@ if __name__=="__main__":
|
|||
print("edit_loop_wait_time modifies the pause the instance takes after processing the open orders")
|
||||
print("instance fetch the orders -> instance sends the orders to the traders ->")
|
||||
print("instance waits for the traders to complete their tasks -> instance waits <loop_wait_time> seconds")
|
||||
print(f"Current value is {None}")
|
||||
print("The input value can be an integer or a float")
|
||||
new_wait_time = input("Desired wait time: ")
|
||||
if not validate_float_or_int(new_wait_time):
|
||||
|
|
@ -221,6 +253,7 @@ if __name__=="__main__":
|
|||
print("edit_call_wait_time modifies the pause that the traders take between some API calls")
|
||||
print("This aims to reduce the load on the API endpoints of the broker.")
|
||||
print("The input value can be an integer or a float")
|
||||
print(f"Current value is {None}")
|
||||
new_wait_time = input("Desired call wait time: ")
|
||||
if not validate_float_or_int(new_wait_time):
|
||||
print("The input is invalid")
|
||||
|
|
@ -267,7 +300,8 @@ if __name__=="__main__":
|
|||
|
||||
elif command==15:
|
||||
print("edit_cooldown_multiplier modifies the pause's multiplier after it hits profit.")
|
||||
print("This aims to reduce the volatility when there are big orderbook movements.")
|
||||
print("This aims to reduce the volatility effect on the trader when there are big orderbook movements.")
|
||||
print(f"Current value is {None}")
|
||||
print("The input value can be an integer or a float")
|
||||
new_multiplier = input("Desired multiplier: ")
|
||||
if not validate_float_or_int(new_multiplier):
|
||||
|
|
@ -277,7 +311,206 @@ if __name__=="__main__":
|
|||
url = f"{base_url}{port}/edit_cooldown_multiplier"
|
||||
parameters = {"cooldown_multiplier": new_multiplier}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==16:
|
||||
print("Returns the free balance of a given coin")
|
||||
coin = input("Input currency: ").upper()
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/get_balance?coin={coin}"
|
||||
print(json.loads(requests.get(url,headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==17:
|
||||
print("cancel_global_last_call reverts global_last_call")
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/cancel_global_last_call"
|
||||
print(json.loads(requests.post(url, headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==18:
|
||||
print("mod_default_order_size modifies the default order size that is used when creating a trader")
|
||||
print(f"Current value is {None}")
|
||||
print("The input value can be an integer or a float")
|
||||
new_default_order_size = input("New default order size: ")
|
||||
if not validate_float_or_int(new_default_order_size):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/mod_default_order_size"
|
||||
parameters = {"amount": new_default_order_size}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==19:
|
||||
print("toggle_log_orders turns on or off the logging of orders")
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/toggle_log_orders"
|
||||
print(json.loads(requests.post(url, headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==20:
|
||||
print("refresh_log_cache refreshes the log cache")
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/refresh_log_cache"
|
||||
print(json.loads(requests.post(url, headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
######################
|
||||
######## EARN ########
|
||||
######################
|
||||
|
||||
elif command==31:
|
||||
print("toggle_pause interrupts or resume the subcription and redemption of funds")
|
||||
if input(f"This will toggle the subscription and redemption of funds on {port}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/toggle_pause"
|
||||
parameters = {"broker": earn_broker}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==32:
|
||||
print("get_step_size returns the step size")
|
||||
url = f"{base_url}/earn/get_step_size?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==33:
|
||||
print("set_step_size sets the step size")
|
||||
new_step_size = input("New step size? ")
|
||||
if not validate_float_or_int(new_step_size):
|
||||
print("Invalid step size")
|
||||
break
|
||||
if input(f"This will set the step size to {new_step_size}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/set_step_size"
|
||||
parameters = {"broker": earn_broker, "new_step_size": new_step_size}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==34:
|
||||
print("get_percentage displays the percentage of funds to be allocated to earn")
|
||||
url = f"{base_url}/earn/get_percentage?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==35:
|
||||
print("set_percentage sets the percentage of funds to be allocated to earn")
|
||||
new_percentage = input("New percentage? ")
|
||||
if not validate_float_or_int(new_percentage):
|
||||
print("Invalid percentage")
|
||||
break
|
||||
if input(f"This will set the percentage to {new_percentage}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/set_percentage"
|
||||
parameters = {"broker": earn_broker, "new_percentage": new_percentage}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==36:
|
||||
print("get_time_between_subscriptions displays the time to wait between subscriptions")
|
||||
url = f"{base_url}/earn/get_time_between_subscriptions?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==37:
|
||||
print("set_time_between_subscriptions sets the time to wait between subscriptions")
|
||||
new_time_between_subscriptions = input("New time between subscriptions? ")
|
||||
if not validate_int(new_time_between_subscriptions):
|
||||
print("Invalid time")
|
||||
break
|
||||
if input(f"This will set the time to wait between subscriptions to {new_time_between_subscriptions}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/set_time_between_subscriptions"
|
||||
parameters = {"broker": earn_broker, "new_time_between_subscriptions": new_time_between_subscriptions}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==38:
|
||||
print("get_time_between_redemptions displays the time to wait between redemptions")
|
||||
url = f"{base_url}/earn/get_time_between_redemptions?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==39:
|
||||
print("set_time_between_redemptions sets the time to wait between redemptions")
|
||||
new_time_between_redemptions = input("New time between redemptions? ")
|
||||
if not validate_int(new_time_between_redemptions):
|
||||
print("Invalid time")
|
||||
break
|
||||
if input(f"This will set the time to wait between redemptions to {new_time_between_redemptions}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/set_time_between_redemptions"
|
||||
parameters = {"broker": earn_broker, "new_time_between_redemptions": new_time_between_redemptions}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==40:
|
||||
print("get_minimum_amount_in_trading_account displays the minimum amount of funds that always have to exist in the trading account")
|
||||
url = f"{base_url}/earn/get_minimum_amount_in_trading_account?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==41:
|
||||
print("set_minimum_amount_in_trading_account sets the minimum amount of funds that always have to exist in the trading account")
|
||||
new_minimum_amount_in_trading_account = input("New minimum amount in trading account? ")
|
||||
if not validate_int(new_minimum_amount_in_trading_account):
|
||||
print("Invalid amount")
|
||||
break
|
||||
if input(f"This will set the minimum amount of funds that always have to exist in the trading account to {new_minimum_amount_in_trading_account}. Are you sure? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/set_minimum_amount_in_trading_account"
|
||||
parameters = {"broker": earn_broker, "new_minimum_amount_in_trading_account": new_minimum_amount_in_trading_account}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==42:
|
||||
print("get_last_subscription display the last subscription")
|
||||
url = f"{base_url}/earn/get_last_subscription?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==43:
|
||||
print("get_last_redemptions displays the last redemption")
|
||||
url = f"{base_url}/earn/get_last_redemption?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==44:
|
||||
print("get_total_balance displays the trading account balance and the earning account balance")
|
||||
url = f"{base_url}/earn/get_total_balance?broker={earn_broker}"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==45:
|
||||
print("get_global_status returns the status of all the earners.")
|
||||
url = f"{base_url}/earn/get_global_status"
|
||||
print(json.loads(requests.get(url,headers=earn_headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==46:
|
||||
print("subscribe forces funds subscription")
|
||||
amount_to_subscribe = input("Enter the amount to subscribe: ")
|
||||
if not validate_float_or_int(amount_to_subscribe):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/subscribe"
|
||||
parameters = {
|
||||
"broker": earn_broker,
|
||||
"amount": amount_to_subscribe}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==47:
|
||||
print("redeem forces funds redemption")
|
||||
amount_to_redeem = input("Enter the amount to redeem: ")
|
||||
if not validate_float_or_int(amount_to_redeem):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/earn/redeem"
|
||||
parameters = {
|
||||
"broker": earn_broker,
|
||||
"amount": amount_to_redeem}
|
||||
print(json.loads(requests.post(url,headers=earn_headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
|
||||
|
||||
######################
|
||||
####### TRADER #######
|
||||
|
|
@ -349,6 +582,7 @@ if __name__=="__main__":
|
|||
print("In order for the importing to be successful, a status file must exist in the status directory ")
|
||||
print("and the take profit order must be open.")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
|
|
@ -577,22 +811,8 @@ if __name__=="__main__":
|
|||
"new_quote": new_quote}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
|
||||
elif command==70:
|
||||
print("reload_safety_order reloads the safety order to the reader using the order id present in the status dictionary")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/reload_safety_order"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==71:
|
||||
print("Views the old_long information")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
|
|
@ -605,7 +825,7 @@ if __name__=="__main__":
|
|||
print(json.loads(requests.get(url,headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==72:
|
||||
elif command==71:
|
||||
print("Returns the price target to reach to switch to long mode")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
|
|
@ -617,23 +837,96 @@ if __name__=="__main__":
|
|||
print(json.loads(requests.get(url,headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==73:
|
||||
print("Returns backtests of the pairs available in an exchange")
|
||||
broker = input("Exchange? (binance, gateio, kucoin or okx): ")
|
||||
amount = input("Amount of days to consider? ")
|
||||
max_rank = input("Maximum CoinMarketCap rank? ")
|
||||
if not validate_int(amount):
|
||||
print("The amount of days specified is invalid")
|
||||
break
|
||||
if not validate_int(max_rank):
|
||||
print("The max_rank specified is invalid")
|
||||
elif command==72:
|
||||
print("Reloads from disk the configuration file of a trader")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}/statistics_server/fetch_backtests?exchange_name={broker}&days={amount}&max_rank={max_rank}"
|
||||
result = json.loads(requests.get(url,headers=headers).content)
|
||||
#for item in result:
|
||||
# print(item, round(result[item],2))
|
||||
sorted_result = {key: value for key, value in sorted(result.items(),key=lambda item: item[1])}
|
||||
for item in sorted_result:
|
||||
print(item, sorted_result[item])
|
||||
url = f"{base_url}{port}/reload_trader_config"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==73:
|
||||
print("toggle_liquidate_after_switch enables or disables the liquidation after an automatic switch to long of a short trader")
|
||||
print("This is only valid in a short trader, of course.")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/toggle_liquidate_after_switch"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==74:
|
||||
print("Returns the amount of safety orders that can be added to a short trader with the available funds")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
base,quote = trading_pair.split("/")
|
||||
url = f"{base_url}{port}/base_add_so_calculation?base={base}"e={quote}"
|
||||
print(json.loads(requests.get(url,headers=headers).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==75:
|
||||
print("mod_concurrent_safety_orders modifies the amount of safety orders opened at the same time")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
new_amount = input("Desired amount of orders: ")
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if not validate_int(new_amount):
|
||||
print("The amount entered is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/mod_concurrent_safety_orders"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote,
|
||||
"amount": new_amount}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==76:
|
||||
print("force_trader_close forces a trader to close the current position")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/force_trader_close"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
||||
elif command==77:
|
||||
print("mod_order_size modifies the initial order size of a trader")
|
||||
print("The change impacts as soon as the trader starts a new deal")
|
||||
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
|
||||
amount = input("Desired order size: ")
|
||||
if not validate_pair(trading_pair):
|
||||
print("The input is invalid")
|
||||
break
|
||||
if not validate_float_or_int(amount):
|
||||
print("The amount entered is invalid")
|
||||
break
|
||||
if input("Proceed? (Y/n) ") in ["Y","y",""]:
|
||||
url = f"{base_url}{port}/mod_order_size"
|
||||
base,quote = trading_pair.split("/")
|
||||
parameters = {"base": base,
|
||||
"quote": quote,
|
||||
"amount": amount}
|
||||
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
|
||||
input("Press ENTER to continue ")
|
||||
|
|
@ -8,9 +8,17 @@ import time
|
|||
import sqlite3
|
||||
import math
|
||||
import statistics
|
||||
import sys
|
||||
from credentials import get_credentials
|
||||
from threading import Thread
|
||||
|
||||
def write_to_log(message):
|
||||
'''
|
||||
Writes a message to the log file
|
||||
'''
|
||||
with open("log.txt", "a") as f:
|
||||
f.write(f"{message}\n")
|
||||
|
||||
|
||||
def yang_zhang(candles):
|
||||
'''
|
||||
|
|
@ -103,17 +111,56 @@ def get_pair_list(broker, inclusions = ["/USDT"], exclusions = []):
|
|||
return pair_list
|
||||
|
||||
|
||||
def fetch_data(broker: str, pair_list: list, timeframe: str, samples: int):
|
||||
def fetch_data(broker: str, pair: str, timeframe: str, samples: int):
|
||||
"""
|
||||
Fetch data from exchange
|
||||
"""
|
||||
|
||||
global volatilities
|
||||
|
||||
wait_time = .5 #Sleep time between requests
|
||||
|
||||
index = 0
|
||||
|
||||
exchange = getattr(ccxt, broker)
|
||||
trading_volume = 0
|
||||
index += 1
|
||||
try:
|
||||
data = exchange.fetch_ohlcv(pair,timeframe=timeframe,limit=samples)
|
||||
except Exception as e:
|
||||
write_to_log(str(e))
|
||||
return None
|
||||
try:
|
||||
parkinson_volatility = parkinson(data)
|
||||
yangzhang_volatility = yang_zhang(data)
|
||||
except Exception as e:
|
||||
write_to_log(str(e))
|
||||
return None
|
||||
for item in data:
|
||||
trading_volume += item[4]*item[5]
|
||||
|
||||
print(f"{pair} on {broker} ready, {len(pair_list)-index} pairs remaining.")
|
||||
return [round(yangzhang_volatility*100,2),round(parkinson_volatility*100,2),int(trading_volume)]
|
||||
|
||||
|
||||
if __name__=="__main__":
|
||||
|
||||
threads = []
|
||||
volatilities = {}
|
||||
samples = 288
|
||||
timeframe = "5m"
|
||||
minimum_volume = 0
|
||||
wait_time = .5
|
||||
|
||||
#Create database if it does not exist
|
||||
database_connection = sqlite3.connect(f"{get_credentials('VOL_DB_PATH')['path']}{sys.argv[1]}.db")
|
||||
database_cursor = database_connection.cursor()
|
||||
database_cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS volatilities_table (
|
||||
pair TEXT,
|
||||
timestamp INTEGER,
|
||||
yang_zhang REAL,
|
||||
parkinson REAL,
|
||||
volume REAL)''')
|
||||
database_connection.commit()
|
||||
database_connection.close()
|
||||
|
||||
exchange = getattr(ccxt, sys.argv[1])
|
||||
exchange = exchange({
|
||||
"apiKey": "",
|
||||
"secret": "",
|
||||
|
|
@ -122,63 +169,13 @@ def fetch_data(broker: str, pair_list: list, timeframe: str, samples: int):
|
|||
"enableRateLimit": True
|
||||
})
|
||||
|
||||
pair_list = get_pair_list(sys.argv[1],inclusions = ["/USDT"], exclusions = [])
|
||||
|
||||
for pair in pair_list:
|
||||
trading_volume = 0
|
||||
index += 1
|
||||
try:
|
||||
data = exchange.fetch_ohlcv(pair,timeframe=timeframe,limit=samples)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
continue
|
||||
try:
|
||||
parkinson_volatility = parkinson(data)
|
||||
yangzhang_volatility = yang_zhang(data)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
continue
|
||||
for item in data:
|
||||
trading_volume += item[4]*item[5]
|
||||
volatilities[broker][pair] = [round(yangzhang_volatility*100,2),round(parkinson_volatility*100,2),int(trading_volume)]
|
||||
print(f"{pair} on {broker} ready, {len(pair_list)-index} pairs remaining.")
|
||||
data = fetch_data(exchange, pair, timeframe, samples)
|
||||
if data is not None:
|
||||
volatilities[pair] = data
|
||||
time.sleep(wait_time)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__=="__main__":
|
||||
|
||||
threads = []
|
||||
exchanges = ["binance","gateio","kucoin","okx"]
|
||||
pair_list = []
|
||||
volatilities = {item:{} for item in exchanges}
|
||||
exchange_list = [item for item in volatilities]
|
||||
samples = 288
|
||||
timeframe = "5m"
|
||||
minimum_volume = 0
|
||||
|
||||
#Create databases for each exchange
|
||||
for item in exchange_list:
|
||||
database_connection = sqlite3.connect(f"{get_credentials('VOL_DB_PATH')['path']}{item}.db")
|
||||
database_cursor = database_connection.cursor()
|
||||
database_cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS volatilities_table (
|
||||
pair TEXT,
|
||||
timestamp INTEGER,
|
||||
yang_zhang REAL,
|
||||
parkinson REAL,
|
||||
volume REAL)''')
|
||||
database_connection.commit()
|
||||
database_connection.close()
|
||||
|
||||
|
||||
for broker in exchange_list:
|
||||
threads.append(Thread(target=fetch_data,args=(broker, get_pair_list(broker), timeframe, samples,)))
|
||||
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
|
||||
for item in exchange_list:
|
||||
write_to_db(item,volatilities[item])
|
||||
write_to_db(sys.argv[1],volatilities)
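# write_to_db is called above but not shown in this diff. A plausible sketch,
# consistent with the volatilities_table schema created earlier and with the
# [yang_zhang, parkinson, volume] lists produced by fetch_data (an assumption,
# not necessarily the repo's actual helper):
def write_to_db(broker, volatilities):
    timestamp = int(time.time())
    connection = sqlite3.connect(f"{get_credentials('VOL_DB_PATH')['path']}{broker}.db")
    cursor = connection.cursor()
    for pair, values in volatilities.items():
        cursor.execute("INSERT INTO volatilities_table VALUES (?,?,?,?,?)",
                       (pair, timestamp, values[0], values[1], values[2]))
    connection.commit()
    connection.close()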
|
||||
|
||||
|
|
|
|||
|
|
@ -1,96 +1,209 @@
|
|||
import sqlite3
|
||||
import sys
|
||||
import datetime
|
||||
import time
|
||||
import ccxt
|
||||
import credentials
|
||||
import requests
|
||||
import calendar
|
||||
import logging
|
||||
import threading
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from flask import Flask, jsonify, request
|
||||
|
||||
|
||||
'''
|
||||
In case the certificate's permissions suddenly change (in auto renewal, for example), reset them this way:
|
||||
$ sudo su
|
||||
# chmod -R 755 /etc/letsencrypt/live/
|
||||
# chmod -R 755 /etc/letsencrypt/archive/
|
||||
# ll /etc/letsencrypt/ (to verify permissions)
|
||||
'''
|
||||
|
||||
cache_requests = False
|
||||
if len(sys.argv)>1 and sys.argv[1]=="--cache_requests":
|
||||
cache_requests = True
|
||||
from waitress import serve
|
||||
|
||||
|
||||
profits_database = "../profits/profits_database.db"
|
||||
hashes_db = {"fetch_last_n_deals":0,
|
||||
"fetch_last_n_deals_without_history":0,
|
||||
"fetch_full_log":0,
|
||||
"fetch_log":0,
|
||||
"daily_totals":0,
|
||||
"daily_totals_by_pair":0,
|
||||
"monthly_totals":0,
|
||||
"monthly_totals_by_pair":0,
|
||||
"get_averages":0,
|
||||
"total_profit":0,
|
||||
"total_profit_by_pair":0}
|
||||
_local_storage = threading.local()
|
||||
|
||||
def get_db_connection():
|
||||
current_time = time.time()
|
||||
|
||||
if not hasattr(_local_storage, 'connection') or not hasattr(_local_storage, 'created_at') or (current_time - _local_storage.created_at) > 3600: # Reconnect every hour
|
||||
if hasattr(_local_storage, 'connection'):
|
||||
try:
|
||||
_local_storage.connection.close()
|
||||
except:
|
||||
pass
|
||||
_local_storage.connection = sqlite3.connect(profits_database, check_same_thread=False)
|
||||
_local_storage.connection.row_factory = sqlite3.Row
|
||||
_local_storage.created_at = current_time
|
||||
|
||||
def get_market_caps(limit):
|
||||
api_key = credentials.get_credentials("CMC")["key"]
|
||||
url = f"https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?CMC_PRO_API_KEY={api_key}&convert=USD&limit={limit}"
|
||||
return requests.get(url).json()["data"]
|
||||
return _local_storage.connection
|
||||
|
||||
@contextmanager
|
||||
def db_cursor():
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
yield cur
|
||||
except Exception:
|
||||
conn.rollback()
|
||||
raise
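# Usage sketch for the thread-local connection plus db_cursor pair above: each
# Flask worker thread reuses its own SQLite connection (recreated hourly) and
# wraps every query in db_cursor(). The helper below is only illustrative and
# is not part of the repo; profits_table/amount/timestamp come from the queries
# further down.
def example_recent_profit(hours=24):
    since = int(time.time()) - hours * 3600
    with db_cursor() as cur:
        cur.execute("SELECT COALESCE(SUM(amount),0) FROM profits_table WHERE timestamp >= ?", (since,))
        return float(cur.fetchone()[0])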
|
||||
|
||||
def load_keys_from_db(file_name):
|
||||
#valid_keys = []
|
||||
|
||||
connection = sqlite3.connect(file_name)
|
||||
connection = sqlite3.connect(file_name)
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("SELECT * FROM credentials_table")
|
||||
data = cursor.fetchall()
|
||||
connection.close()
|
||||
|
||||
valid_keys = [line[1] for line in data]
|
||||
#for line in data:
|
||||
# valid_keys.append(line[1])
|
||||
|
||||
return valid_keys
|
||||
|
||||
|
||||
def get_valid_keys():
|
||||
if not hasattr(get_valid_keys, '_keys'):
|
||||
get_valid_keys._keys = load_keys_from_db("api_credentials.db")
|
||||
return get_valid_keys._keys
|
||||
|
||||
|
||||
def profit_report():
|
||||
##Queries
|
||||
#Last 60 days query
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
WHERE strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
|
||||
GROUP BY day_utc3
|
||||
ORDER BY day_utc3;""")
|
||||
last_60_days_rows = cursor.fetchall()
|
||||
#Last 30 days query
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
WHERE strftime('%s', 'now') - timestamp <= 30 * 24 * 60 * 60 -- 30 days in seconds;""")
|
||||
last_30_days = cursor.fetchall()
|
||||
#Last 7 days query
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
WHERE strftime('%s', 'now') - timestamp <= 7 * 24 * 60 * 60 -- 7 days in seconds;""")
|
||||
last_7_days = cursor.fetchall()
|
||||
#Last n months query
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("""SELECT strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') AS year_month_utc3,
|
||||
SUM(amount) AS total_amount
|
||||
FROM profits_table
|
||||
WHERE strftime('%s', 'now') - timestamp <= 18 * 30 * 24 * 60 * 60 -- 18 months in seconds
|
||||
GROUP BY year_month_utc3
|
||||
ORDER BY year_month_utc3;""")
|
||||
last_n_months_rows = cursor.fetchall()
|
||||
#Yearly totals
|
||||
# cursor.execute("""SELECT strftime('%Y', timestamp, 'unixepoch', '-3 hours') AS year_utc3,
|
||||
# SUM(amount) AS total_amount
|
||||
# FROM profits_table
|
||||
# WHERE strftime('%s', 'now') - timestamp <= 24 * 365 * 60 * 60 -- 365 days in seconds
|
||||
# GROUP BY year_utc3
|
||||
# ORDER BY year_utc3;""")
|
||||
# yearly_totals = cursor.fetchall()
|
||||
#Per exchange
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("""SELECT
|
||||
exchange_name,
|
||||
CASE
|
||||
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime') THEN 'This Month'
|
||||
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime', '-1 month') THEN 'Last Month'
|
||||
ELSE 'Other Months'
|
||||
END AS month_group,
|
||||
SUM(amount) AS total_amount
|
||||
FROM
|
||||
profits_table
|
||||
WHERE
|
||||
strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
|
||||
GROUP BY
|
||||
exchange_name, month_group
|
||||
ORDER BY
|
||||
exchange_name, month_group;""")
|
||||
per_exchange = cursor.fetchall()
|
||||
|
||||
|
||||
#Projection calculation
|
||||
days_in_month = calendar.monthrange(datetime.date.today().year, datetime.date.today().month)[1]
|
||||
daily_combined_media = (last_30_days[0][1]/30+last_7_days[0][1]/7)/2
|
||||
current_amount = last_n_months_rows[-1][1]
|
||||
days_past_this_month = int(last_60_days_rows[-1][0][8:10])
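# Worked example (illustrative numbers only): 300 of profit over the last 30
# days and 84 over the last 7 gives a blended daily mean of (300/30 + 84/7)/2 = 11;
# 20 days into a 30-day month with 220 already booked, the projection computed
# below is 220 + 11 * (30 - 20) = 330.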
|
||||
|
||||
#Per exchange
|
||||
binance_amount = 0
|
||||
gateio_amount = 0
|
||||
kucoin_amount = 0
|
||||
okex_amount = 0
|
||||
|
||||
for row in per_exchange:
|
||||
exchange_name = row[0].strip().lower()
|
||||
if exchange_name=="binance":
|
||||
if row[1]=="This Month":
|
||||
binance_amount = row[2]
|
||||
elif exchange_name=="gateio":
|
||||
if row[1]=="This Month":
|
||||
gateio_amount = row[2]
|
||||
elif exchange_name=="kucoin":
|
||||
if row[1]=="This Month":
|
||||
kucoin_amount = row[2]
|
||||
elif exchange_name=="okex":
|
||||
if row[1]=="This Month":
|
||||
okex_amount = row[2]
|
||||
|
||||
total_amount = binance_amount+gateio_amount+kucoin_amount+okex_amount
|
||||
|
||||
last_60_days_result = {row[0]: round(row[1],2) for row in last_60_days_rows}
|
||||
last_18_months_result = {row[0]: round(row[1],2) for row in last_n_months_rows}
|
||||
last_30_days_average = last_30_days[0][1]/30
|
||||
last_7_days_average = last_7_days[0][1]/7
|
||||
this_month_projection = current_amount + daily_combined_media*(days_in_month-days_past_this_month)
|
||||
binance_percentage = binance_amount/total_amount*100
|
||||
gateio_percentage = gateio_amount/total_amount*100
|
||||
kucoin_percentage = kucoin_amount/total_amount*100
|
||||
okex_percentage = okex_amount/total_amount*100
|
||||
|
||||
return {"Last 60 days": last_60_days_result,
|
||||
"Last 18 months": last_18_months_result,
|
||||
"Last 30 days average": last_30_days_average,
|
||||
"Last 7 days average": last_7_days_average,
|
||||
"This month projection": this_month_projection,
|
||||
"Binance": binance_amount,
|
||||
"Binance percentage": binance_percentage,
|
||||
"Gateio": gateio_amount,
|
||||
"Gateio percentage": gateio_percentage,
|
||||
"Kucoin": kucoin_amount,
|
||||
"Kucoin percentage": kucoin_percentage,
|
||||
"OKX": okex_amount,
|
||||
"OKX percentage": okex_percentage,
|
||||
"Total profit": total_amount}
|
||||
|
||||
|
||||
def query_total_profit(pair=None):
|
||||
'''
|
||||
Returns total profit of the trading pair.
|
||||
If no pair specified, returns the grand total of all pairs.
|
||||
'''
|
||||
connection = sqlite3.connect(profits_database)
|
||||
cursor = connection.cursor()
|
||||
|
||||
if pair is None:
|
||||
query = "SELECT SUM(amount) AS total_profit FROM profits_table"
|
||||
cursor.execute(query)
|
||||
connection.close()
|
||||
query_result = cursor.fetchall()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
return query_result[0][0]
|
||||
else:
|
||||
query = """SELECT pair, SUM(amount) AS total_profit
|
||||
FROM profits_table
|
||||
GROUP BY pair;"""
|
||||
cursor.execute(query)
|
||||
connection.close()
|
||||
query_result = cursor.fetchall()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
for item in query_result:
|
||||
if item[0].replace("/","")==pair:
|
||||
return item[1]
|
||||
return 0
|
||||
|
||||
|
||||
def daily_and_monthly_totals():
|
||||
def daily_and_monthly_totals() -> tuple[float, float]:
|
||||
'''
|
||||
Returns a tuple with the current day and the current month's total profit.
|
||||
'''
|
||||
#Connect to db
|
||||
connection = sqlite3.connect(profits_database)
|
||||
cursor = connection.cursor()
|
||||
|
||||
now = datetime.datetime.now()
|
||||
|
||||
# Create a datetime object for the start of the day
|
||||
|
|
@ -100,16 +213,17 @@ def daily_and_monthly_totals():
|
|||
# Convert the start of the day to Unix time
|
||||
start_of_day_unix = int(time.mktime(start_of_day.timetuple()))
|
||||
start_of_month_unix = int(time.mktime(start_of_month.timetuple()))
|
||||
|
||||
query = f"""SELECT * FROM profits_table
|
||||
WHERE timestamp >= {start_of_month_unix}
|
||||
ORDER BY timestamp DESC;"""
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
connection.close()
|
||||
|
||||
monthly_total = sum([item[2] for item in query_result])
|
||||
daily_total = sum([item[2] for item in query_result if item[0]>=start_of_day_unix])
|
||||
|
||||
query = """SELECT
|
||||
COALESCE(SUM(CASE WHEN timestamp >= :day THEN amount END),0) AS daily_total,
|
||||
COALESCE(SUM(CASE WHEN timestamp >= :month THEN amount END),0) AS monthly_total
|
||||
FROM profits_table;
|
||||
"""
|
||||
with db_cursor() as cur:
|
||||
cur.execute(query, {"day": start_of_day_unix, "month": start_of_month_unix})
|
||||
row = cur.fetchone()
|
||||
daily_total = float(row["daily_total"])
|
||||
monthly_total = float(row["monthly_total"])
|
||||
|
||||
return (daily_total, monthly_total)
|
||||
|
||||
|
|
@ -119,9 +233,6 @@ def query_daily_totals(pair=None):
|
|||
Returns a dictionary of daily totals of the trading pair.
|
||||
If no pair specified, returns the totals of all pairs.
|
||||
'''
|
||||
#Connect to db
|
||||
connection = sqlite3.connect(profits_database)
|
||||
cursor = connection.cursor()
|
||||
|
||||
result = {}
|
||||
|
||||
|
|
@ -130,19 +241,19 @@ def query_daily_totals(pair=None):
|
|||
SUM(amount) AS total_profit
|
||||
FROM profits_table
|
||||
GROUP BY day_utc3;"""
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
connection.close()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
for item in query_result:
|
||||
result[item[0]] = item[1]
|
||||
else:
|
||||
query = """SELECT pair, strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
|
||||
SUM(amount) AS total_profit
|
||||
FROM profits_table
|
||||
GROUP BY pair, day_utc3;"""
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
connection.close()
|
||||
GROUP BY pair, day_utc3;"""
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
for item in query_result:
|
||||
if item[0].replace("/","")==pair:
|
||||
result[item[1]] = item[2]
|
||||
|
|
@ -154,9 +265,6 @@ def query_monthly_totals(pair=None):
|
|||
Returns a dictionary of monthly totals of the trading pair.
|
||||
If no pair specified, returns the totals of all pairs.
|
||||
'''
|
||||
#Connect to db
|
||||
connection = sqlite3.connect(profits_database)
|
||||
cursor = connection.cursor()
|
||||
|
||||
result = {}
|
||||
|
||||
|
|
@ -165,19 +273,19 @@ def query_monthly_totals(pair=None):
|
|||
SUM(amount) AS total_profit
|
||||
FROM profits_table
|
||||
GROUP BY month;"""
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
connection.close()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
for item in query_result:
|
||||
result[item[0]] = item[1]
|
||||
else:
|
||||
query = f"""SELECT pair, strftime('%Y-%m', datetime(timestamp, 'unixepoch', '-3 hours')) AS month,
|
||||
query = """SELECT pair, strftime('%Y-%m', datetime(timestamp, 'unixepoch', '-3 hours')) AS month,
|
||||
SUM(amount) AS total_profit
|
||||
FROM profits_table
|
||||
GROUP BY pair, month;"""
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
connection.close()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
query_result = cursor.fetchall()
|
||||
for item in query_result:
|
||||
if item[0].replace("/","")==pair:
|
||||
result[item[1]] = item[2]
|
||||
|
|
@ -188,11 +296,9 @@ def last_n_deals(n):
|
|||
'''
|
||||
Returns a list of the latest n deals
|
||||
'''
|
||||
connection = sqlite3.connect(profits_database)
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(f"SELECT * FROM profits_table ORDER BY timestamp DESC LIMIT ?",(n,))
|
||||
result = cursor.fetchall()
|
||||
connection.close()
|
||||
with db_cursor() as cursor:
|
||||
cursor.execute("SELECT * FROM profits_table ORDER BY timestamp DESC LIMIT ?",(n,))
|
||||
result = cursor.fetchall()
|
||||
|
||||
return result
|
||||
|
||||
|
|
@ -219,8 +325,6 @@ def last_n_lines(file_name,width,amount=4,full_log=False):
|
|||
return result,len(file_contents)
|
||||
|
||||
for line in file_contents[::-1][:amount]:
|
||||
#trimmed = f"{line[0]}{line[12:21]}{line[23:]}".strip()
|
||||
#result.append(trimmed[:width])
|
||||
trimmed = line.strip()
|
||||
result.append(trimmed[:width])
|
||||
if len(trimmed)>width:
|
||||
|
|
@@ -228,119 +332,50 @@ def last_n_lines(file_name,width,amount=4,full_log=False):
    return result[:amount],len(file_contents)


def return_parkinson_backtests(broker, days, max_rank):
    '''
    Returns a dictionary containing backtests with the format {coin: value}
    '''
    if broker not in ["binance", "gateio", "kucoin", "okx"]:
        return {}

    evaluation_dictionary = {}
    start_of_day = int(time.mktime(datetime.datetime.now().date().timetuple()))
    since = int(start_of_day - 60*60*24*days)
def tail_log(filename, lines=200):
    if not os.path.exists(filename):
        return [], 0

    # Getting the data from the database
    print("Querying database...")
    conn = sqlite3.connect(f"data/{broker}.db")
    cursor = conn.cursor()
    cursor.execute('SELECT * FROM volatilities_table WHERE timestamp > ?', (since,))
    rows = cursor.fetchall()
    conn.close()
    block_size = 1024
    blocks = []
    with open(filename, 'rb') as f:
        f.seek(0, 2)
        #total_bytes = remaining_bytes = f.tell()
        remaining_bytes = f.tell()

    # Parse the data
    print("Parsing the data...")
    for row in rows:
        if row[0] not in evaluation_dictionary:
            evaluation_dictionary[row[0]] = [row[2]]
        else:
            evaluation_dictionary[row[0]].append(row[2])
        while len(blocks) < lines and remaining_bytes > 0:
            read_bytes = min(block_size, remaining_bytes)
            f.seek(-read_bytes, 1)
            block = f.read(read_bytes).splitlines()
            f.seek(-read_bytes, 1)

    #Calculate weighted averages
    print("Calculating weighted averages")
    weighted_averages = {}
    for key in evaluation_dictionary:
        multiplier = len(evaluation_dictionary[key])
        total = 0
        for value in evaluation_dictionary[key][::-1]:
            total+=value*multiplier/len(evaluation_dictionary[key])
            multiplier-=1
        weighted_averages[key] = total/len(evaluation_dictionary[key])
            # Prepend to blocks (since we're reading backwards)
            blocks = block[-(len(blocks)+1):] + blocks
            remaining_bytes -= read_bytes

    #Filter by rank
    print("Filtering results by CMC rank")
    coins_accepted = []
    market_caps = get_market_caps(max_rank)
    for result in market_caps:
        coins_accepted.append(result["symbol"])

    for coin in weighted_averages.copy():
        if coin.split("/")[0] not in coins_accepted:
            del(weighted_averages[coin])


    #Checking open markets
    print("Filtering results by market state")
    exchange_class = getattr(ccxt, broker)
    broker = exchange_class({
        "apiKey": "",
        "secret": "",
        "timeout": 30000,
        "enableRateLimit": True,
        'options': {
            'newOrderRespType': 'FULL'}
        })

    markets = broker.load_markets()
    for key in weighted_averages.copy():
        if key not in markets or not markets[key]["active"]:
            del(weighted_averages[key])

    return weighted_averages
    # Decode and filter empty lines
    result = [line.decode('utf-8', errors='ignore').strip() for line in blocks if line.strip()]
    return result[-lines:],len(result[-lines:])


stats_api = Flask(__name__)

@stats_api.route("/fetch_backtests")
def fetch_backtests():
@stats_api.route("/fetch_profit_report")
def fetch_profit_report():
    '''
    GET request
    Parameters: 'exchange_name' -> string
                'days' -> int
                'max_rank' -> int
    Parameters: None
    Returns: JSON object with profit report data
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            broker = request.args.get("exchange_name")
            days = int(request.args.get("days")) # type: ignore
            max_rank = int(request.args.get("max_rank")) # type: ignore
            return return_parkinson_backtests(broker,days,max_rank)
        except Exception as e:
            print(e)
            return jsonify({"HORROR": f"{e}"})
    return jsonify({'Error': 'API key invalid'}), 401


@stats_api.route("/clear_caches")
def clear_hashes():
    global hashes_db

    '''
    GET request
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        hashes_db = {"fetch_last_n_deals":0,
                     "fetch_last_n_deals_without_history":0,
                     "fetch_full_log":0,
                     "fetch_log":0,
                     "daily_totals":0,
                     "daily_totals_by_pair":0,
                     "monthly_totals":0,
                     "monthly_totals_by_pair":0,
                     "get_averages":0,
                     "total_profit":0,
                     "total_profit_by_pair":0}
        return jsonify({"Done":0})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        return jsonify(profit_report())
    except Exception as e:
        print(e)
        return jsonify({"Error": f"{e}"})


@stats_api.route("/fetch_last_n_deals")
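For comparison only, the same "last N lines" result can also be obtained with a bounded deque. This is a sketch of an alternative, not what the tail_log() added above implements; it trades the block-wise backwards seeking for one sequential read of the file.

# Illustration only: bounded-deque tail, not the block-seek approach used above.
from collections import deque

def tail_lines_simple(filename, lines=200):
    with open(filename, "r", errors="ignore") as f:
        last = deque((line.rstrip("\n") for line in f), maxlen=lines)
    # Drop empty lines, mirroring the filtering done by tail_log()
    result = [line.strip() for line in last if line.strip()]
    return result, len(result)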
@@ -349,21 +384,15 @@ def fetch_last_n_deals():
    GET request
    Parameter: 'amount_of_deals' -> int
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            parameter = request.args.get("amount_of_deals")
            response_value = last_n_deals(parameter)
            if not cache_requests:
                return jsonify({"last_deals": response_value})
            response_hash = hash(str({"last_deals": response_value}))
            if hashes_db["fetch_last_n_deals"]!=response_hash:
                hashes_db["fetch_last_n_deals"] = response_hash
                return jsonify({"last_deals": response_value})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({"last_deals":""})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        parameter = request.args.get("amount_of_deals")
        response_value = last_n_deals(parameter)
        return jsonify({"last_deals": response_value})
    except Exception as e:
        print(e)
        return jsonify({"last_deals":""})


@stats_api.route("/fetch_last_n_deals_without_history")
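Every handler in this file now opens with the same X-API-KEY guard against get_valid_keys(). Purely as a sketch of one possible follow-up refactor, and not part of this diff, that repetition could be factored into a small decorator:

# Hypothetical decorator for the repeated API-key check; not part of the repository.
from functools import wraps
from flask import jsonify, request

def require_api_key(view):
    @wraps(view)
    def wrapper(*args, **kwargs):
        key = request.headers.get("X-API-KEY")
        if key is None or key not in get_valid_keys():  # get_valid_keys() as used elsewhere in this file
            return jsonify({'Error': 'API key invalid'}), 401
        return view(*args, **kwargs)
    return wrapper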
@@ -372,22 +401,16 @@ def fetch_last_n_deals_without_history():
    GET request
    Parameter: 'amount_of_deals' -> int
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            parameter = request.args.get("amount_of_deals")
            #return jsonify({"last_deals": last_n_deals_without_history(parameter)})
            response_value = last_n_deals_without_history(parameter)
            if not cache_requests:
                return jsonify({"last_deals": response_value})
            response_hash = hash(str({"last_deals": response_value}))
            if hashes_db["fetch_last_n_deals_without_history"]!=response_hash:
                hashes_db["fetch_last_n_deals_without_history"] = response_hash
                return jsonify({"last_deals": response_value})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({"last_deals":""})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        parameter = request.args.get("amount_of_deals")
        #return jsonify({"last_deals": last_n_deals_without_history(parameter)})
        response_value = last_n_deals_without_history(parameter)
        return jsonify({"last_deals": response_value})
    except Exception as e:
        print(e)
        return jsonify({"last_deals":""})


@stats_api.route("/fetch_full_log")
@@ -395,23 +418,19 @@ def fetch_full_log():
    '''
    GET request
    Parameters: 'exchange_name' -> string

    It trims the full log to 200 lines, to avoid sending too much data to the client.
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            exchange_name = request.args.get("exchange_name")
            width = 0
            last_lines,amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,0,full_log=True)
            if not cache_requests:
                return jsonify({"line": last_lines, "amount_of_lines": amount_of_lines})
            response_hash = hash(str({"line": last_lines, "amount_of_lines": amount_of_lines}))
            if hashes_db["fetch_full_log"]!=response_hash:
                hashes_db["fetch_full_log"] = response_hash
                return jsonify({"line": last_lines, "amount_of_lines": amount_of_lines})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return {"line": [""]*width,"amount_of_lines": 0}
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        exchange_name = request.args.get("exchange_name")
        width = 0
        last_lines, amount_of_lines = tail_log(f"../logs/{exchange_name}.log", 200)
        return jsonify({"line": last_lines[-200:], "amount_of_lines": amount_of_lines})
    except Exception as e:
        print(e)
        return {"line": [""]*width,"amount_of_lines": 0}


@stats_api.route("/fetch_log")
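As a usage sketch, a client fetching the trimmed log from this route might look like the following. The local host is an assumption, the API key placeholder is hypothetical, and the port matches the serve() call at the bottom of the file; "binance" is one of the exchanges handled elsewhere in this file.

# Hypothetical client call to /fetch_full_log; replace the key placeholder with a real one.
import requests

response = requests.get(
    "http://127.0.0.1:5010/fetch_full_log",
    params={"exchange_name": "binance"},
    headers={"X-API-KEY": "your-key-here"},
    timeout=10,
)
print(response.status_code, response.json())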
@@ -422,45 +441,33 @@ def fetch_log():
                'width' -> int
                'amount' -> int
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            exchange_name = request.args.get("exchange_name")
            width = int(request.args.get("width")) # type: ignore
            amount = int(request.args.get("amount")) # type: ignore
            last_lines,total_amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,amount)
            if not cache_requests:
                return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
            response_hash = hash(str({"line": last_lines, "amount_of_lines": total_amount_of_lines}))
            if hashes_db["fetch_log"]!=response_hash:
                hashes_db["fetch_log"] = response_hash
                return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return {"line": [""]*10,"amount_of_lines": 0}
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        exchange_name = request.args.get("exchange_name")
        width = int(request.args.get("width")) # type: ignore
        amount = int(request.args.get("amount")) # type: ignore
        last_lines,total_amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,amount)
        return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
    except Exception as e:
        print(e)
        return {"line": [""]*10,"amount_of_lines": 0}


@stats_api.route("/combined_totals")
def combined_totals():
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        daily_totals = daily_and_monthly_totals()
        return jsonify({"combined": daily_totals})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    daily_totals = daily_and_monthly_totals()
    return jsonify({"combined": daily_totals})


@stats_api.route("/daily_totals")
def get_daily_totals():
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        daily_totals = query_daily_totals()
        if not cache_requests:
            return jsonify(daily_totals)
        response_hash = hash(str(daily_totals))
        if hashes_db["daily_totals"]!=response_hash:
            hashes_db["daily_totals"] = response_hash
            return jsonify(daily_totals)
        return jsonify({"no_changes": True})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    daily_totals = query_daily_totals()
    return jsonify(daily_totals)


@stats_api.route("/daily_totals_by_pair")
@@ -470,36 +477,24 @@ def get_daily_totals_by_pair():
    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            base = request.args.get("base")
            quote = request.args.get("quote")
            daily_totals = query_daily_totals(f"{base}{quote}")
            if not cache_requests:
                return jsonify(daily_totals)
            response_hash = hash(str(daily_totals))
            if hashes_db["daily_totals_by_pair"]!=response_hash:
                hashes_db["daily_totals_by_pair"] = response_hash
                return jsonify(daily_totals)
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({'Error': 'Halp'})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        daily_totals = query_daily_totals(f"{base}{quote}")
        return jsonify(daily_totals)
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})


@stats_api.route("/monthly_totals")
def get_monthly_totals():
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        monthly_totals = query_monthly_totals()
        if not cache_requests:
            return jsonify(monthly_totals)
        response_hash = hash(str(monthly_totals))
        if hashes_db["monthly_totals"]!=response_hash:
            hashes_db["monthly_totals"] = response_hash
            return jsonify(monthly_totals)
        return jsonify({"no_changes": True})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    monthly_totals = query_monthly_totals()
    return jsonify(monthly_totals)


@stats_api.route("/monthly_totals_by_pair")
@@ -509,67 +504,47 @@ def get_monthly_totals_by_pair():
    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            base = request.args.get("base")
            quote = request.args.get("quote")
            monthly_totals = query_monthly_totals(f"{base}{quote}")
            if not cache_requests:
                return jsonify(monthly_totals)
            response_hash = hash(str(monthly_totals))
            if hashes_db["monthly_totals_by_pair"]!=response_hash:
                hashes_db["monthly_totals_by_pair"] = response_hash
                return jsonify(monthly_totals)
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({'Error': 'Halp'})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        monthly_totals = query_monthly_totals(f"{base}{quote}")
        return jsonify(monthly_totals)
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})


@stats_api.route("/get_averages")
def get_averages():
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            daily_totals = query_daily_totals()
            val_30 = 0
            val_7 = 0
            acc_30 = []
            acc_7 = []
            for x in sorted(daily_totals):
                acc_30.append(daily_totals[x])
                acc_7.append(daily_totals[x])
            length_30 = min(30,len(acc_30)) #Last 30 days
            length_7 = min(7,len(acc_7)) #Last 7 days
            for _ in range(length_30):
                val_30 += acc_30.pop()
            for _ in range(length_7):
                val_7 += acc_7.pop()
            if not cache_requests:
                return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
            response_hash = hash(str({"30_day": val_30/length_30, "7_day": val_7/length_7}))
            if hashes_db["get_averages"]!=response_hash:
                hashes_db["get_averages"] = response_hash
                return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({'Error': 'Halp'})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        daily_totals = query_daily_totals()
        val_30 = 0
        val_7 = 0
        recent_days = sorted(daily_totals.keys(), reverse=True)[:30]
        acc_30 = [daily_totals[date] for date in recent_days[:30]]
        acc_7 = [daily_totals[date] for date in recent_days[:7]]
        length_30 = min(30,len(acc_30)) #Last 30 days
        length_7 = min(7,len(acc_7)) #Last 7 days
        for _ in range(length_30):
            val_30 += acc_30.pop()
        for _ in range(length_7):
            val_7 += acc_7.pop()
        return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})


@stats_api.route("/total_profit")
def total_profit():
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        total = query_total_profit()
        if not cache_requests:
            return jsonify({"Total profit": total})
        response_hash = hash(str({"Total profit": total}))
        if hashes_db["total_profit"]!=response_hash:
            hashes_db["total_profit"] = response_hash
            return jsonify({"Total profit": total})
        return jsonify({"no_changes": True})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    total = query_total_profit()
    return jsonify({"Total profit": total})


@stats_api.route("/total_profit_by_pair")
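To make the new averaging path in /get_averages concrete, here is a toy run of the same slicing logic; the dates and profit values are invented for the example.

# Toy illustration of the 7-day slice used above; the dates and values are made up.
daily_totals = {"2025-01-01": 10.0, "2025-01-02": 20.0, "2025-01-03": 30.0}
recent_days = sorted(daily_totals.keys(), reverse=True)[:30]
acc_7 = [daily_totals[date] for date in recent_days[:7]]
print(sum(acc_7) / len(acc_7))   # (30.0 + 20.0 + 10.0) / 3 = 20.0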
@@ -579,33 +554,26 @@ def total_profit_by_pair():
    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            base = request.args.get("base")
            quote = request.args.get("quote")
            total = query_total_profit(f"{base}{quote}")
            if not cache_requests:
                return jsonify({"Total profit": total})
            response_hash = hash(str({"Total profit": total}))
            if hashes_db["total_profit_by_pair"]!=response_hash:
                hashes_db["total_profit_by_pair"] = response_hash
                return jsonify({"Total profit": total})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({'Error': 'Halp'})
    return jsonify({'Error': 'API key invalid'}), 401
    if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        total = query_total_profit(f"{base}{quote}")
        return jsonify({"Total profit": total})
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})


if __name__=="__main__":

    # Load valid keys from database
    valid_keys = load_keys_from_db("api_credentials.db")

    #Waitress
    #serve(stats_api,host="0.0.0.0",port=5010)

    #Dev server
    stats_api.run(host="0.0.0.0",port=5010)
    logger = logging.getLogger('waitress')
    logger.setLevel(logging.INFO)
    serve(stats_api,host="0.0.0.0",port=5010)

    #Flask
    # app.run(host="0.0.0.0", port=5010, debug=True)
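load_keys_from_db() itself is not shown in this diff. A minimal sketch, assuming api_credentials.db exposes a single table of key strings (the keys_table and api_key names are guesses), could look like this:

# Hypothetical sketch of load_keys_from_db(); table and column names are assumptions.
import sqlite3

def load_keys_from_db(database="api_credentials.db"):
    connection = sqlite3.connect(database)
    cursor = connection.cursor()
    cursor.execute("SELECT api_key FROM keys_table")
    keys = {row[0] for row in cursor.fetchall()}
    connection.close()
    return keys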