├── .deepsource.toml
├── .gitignore
├── .pep8speaks.yml
├── Docker.md
├── Dockerfile
├── LICENSE
├── README.md
├── config.env.sample
├── docker-compose.yml
├── genStr
├── genStr.bat
├── install_req
├── install_req.bat
├── loader
│   ├── __init__.py
│   ├── __main__.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── checks.py
│   │   ├── main.py
│   │   ├── menu.py
│   │   ├── methods.py
│   │   ├── types.py
│   │   └── utils.py
│   ├── job.py
│   ├── types.py
│   └── userge
│       ├── __init__.py
│       ├── api.py
│       ├── connection.py
│       └── main.py
├── menu
├── menu.bat
├── requirements.txt
├── run
├── run.bat
└── tools
    └── genstr.py
/.deepsource.toml:
--------------------------------------------------------------------------------
1 | version = 1
2 |
3 | [[analyzers]]
4 | name = "python"
5 | enabled = true
6 | dependency_file_paths = ["requirements.txt"]
7 |
8 | [analyzers.meta]
9 | runtime_version = "3.x.x"
10 | max_line_length = 100
11 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | env/
106 | .env
107 | .venv
108 | env/
109 | venv/
110 | ENV/
111 | env.bak/
112 | venv.bak/
113 |
114 | # Spyder project settings
115 | .spyderproject
116 | .spyproject
117 |
118 | # Rope project settings
119 | .ropeproject
120 |
121 | # mkdocs documentation
122 | /site
123 |
124 | # mypy
125 | .mypy_cache/
126 | .dmypy.json
127 | dmypy.json
128 |
129 | # Pyre type checker
130 | .pyre/
131 |
132 | # config files
133 | .apt/
134 | .heroku/
135 | .profile.d/
136 | vendor/
137 | config.env
138 | config.env.tmp
139 | .vscode/
140 | .idea/
141 | *.session
142 | unknown_errors.txt
143 | logs/
144 | .rcache
145 | userge/*
146 | test.py
147 |
--------------------------------------------------------------------------------
/.pep8speaks.yml:
--------------------------------------------------------------------------------
1 | # File : .pep8speaks.yml
2 |
3 | scanner:
4 | linter: flake8
5 |
6 | flake8:
7 | max-line-length: 100
8 | ignore:
9 | - W503 # line break before binary operator
10 |
11 | message:
12 | opened:
13 | header: "@{name}, Thanks for opening this PR."
14 | updated:
15 | header: "@{name}, Thanks for updating this PR."
16 |
--------------------------------------------------------------------------------
/Docker.md:
--------------------------------------------------------------------------------
1 | # Docker Guide For Userge 🐳 #
2 |
3 | ## Install docker ##
4 | - Follow the official docker [installation guide](https://docs.docker.com/engine/install/ubuntu/)
5 |
6 | ## Install Docker-compose ##
7 | - The easiest way to install docker-compose is
8 | ```sudo pip install docker-compose```
9 | - You can also check the other official ways of installing docker-compose [here](https://docs.docker.com/compose/install/)
10 |
11 | ## Run Userge ##
12 | - We don't need to clone the repo (docker-compose does that for us)
13 | - Set up the configs
14 | - Download the sample config file
15 | - ```mkdir userge && cd userge```
16 | - ```wget https://raw.githubusercontent.com/UsergeTeam/Loader/master/config.env.sample -O config.env```
17 | - ```vim config.env```
18 | - Download the yml file for docker-compose
19 | - ```wget https://raw.githubusercontent.com/UsergeTeam/Loader/master/docker-compose.yml```
20 | - Finally, start the bot
21 | ```docker-compose up -d```
22 | - The bot should be running now.
23 | Check logs with ```docker-compose logs -f```
24 |
25 | ## How to stop the bot ##
26 | - Stop Command
27 | ```docker-compose stop```
28 | - This will just stop the containers; built images won't be removed. So next time you can start with the ``docker-compose start`` command,
29 | and it won't take time to rebuild from scratch
30 |
31 | - Down command
32 | ```docker-compose down```
33 | - This stops and removes the containers (pass ``--rmi all`` to also delete the built images). So next time you have to run ``docker-compose up -d`` to start the bot
34 |
35 | ### Q&A ###
36 | - How to see logs
37 | `docker-compose logs -f`
38 | - How to update
39 | `docker-compose up -d`
40 | Changes will be fetched from the git repo. You can change the repo URL in the _docker-compose.yml_ file.
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # set base image (host OS)
2 | FROM python:3.9
3 |
4 | # set the working directory in the container
5 | WORKDIR /app/
6 |
7 | RUN apt -qq update
8 | RUN apt -qq install -y --no-install-recommends \
9 | curl \
10 | git \
11 | gnupg2 \
12 | unzip \
13 | wget \
14 | ffmpeg
15 |
16 | # install chrome
17 | RUN mkdir -p /tmp/ && \
18 | cd /tmp/ && \
19 | wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
20 | # -f ==> is required to fix missing dependencies
21 | dpkg -i ./google-chrome-stable_current_amd64.deb; apt -fqqy install && \
22 | # clean up the container "layer", after we are done
23 | rm ./google-chrome-stable_current_amd64.deb
24 |
25 | # install chromedriver
26 | RUN mkdir -p /tmp/ && \
27 | cd /tmp/ && \
28 | wget -O /tmp/chromedriver.zip http://chromedriver.storage.googleapis.com/$(curl -sS chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip && \
29 | unzip /tmp/chromedriver.zip chromedriver -d /usr/bin/ && \
30 | # clean up the container "layer", after we are done
31 | rm /tmp/chromedriver.zip
32 |
33 | ENV GOOGLE_CHROME_DRIVER /usr/bin/chromedriver
34 | ENV GOOGLE_CHROME_BIN /usr/bin/google-chrome-stable
35 |
36 | # install node-js
37 | RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - && \
38 | apt-get install -y nodejs && \
39 | npm i -g npm
40 |
41 | # install rar
42 | RUN mkdir -p /tmp/ && \
43 | cd /tmp/ && \
44 | wget -O /tmp/rarlinux.tar.gz http://www.rarlab.com/rar/rarlinux-x64-6.0.0.tar.gz && \
45 | tar -xzvf rarlinux.tar.gz && \
46 | cd rar && \
47 | cp -v rar unrar /usr/bin/ && \
48 | # clean up
49 | rm -rf /tmp/rar*
50 |
51 | # copy the content of the local src directory to the working directory
52 | COPY . .
53 |
54 | # install dependencies
55 | RUN pip install -r requirements.txt
56 |
57 | # command to run on container start
58 | CMD [ "bash", "./run" ]
59 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 | <one line to give the program's name and a brief idea of what it does.>
635 | Copyright (C) <year> <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program> Copyright (C) <year> <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Loader
2 |
3 | This project helps you manage unlimited plugin repos with a single [core repo](https://github.com/UsergeTeam/Userge).
4 |
5 | ## Main Features
6 |
7 | * optimized plugin structure
8 | * support for unlimited public and private plugin repos
9 | * custom core repo support
10 | * priority handling
11 | * version controlling
12 | * branch switching
13 | * better control via the config file
14 | * installs only the required dependencies
15 | * plugin conflict handling
16 | * automatic requirements conflict resolver
17 | * constraints support (include/exclude/in)
18 | * recovery menu to reset a crashed state
19 | * both Windows and Linux support
20 | * optimized boot time
21 |
22 | ## Plugins Repo Template
23 |
24 | You can fork and edit, or refer to, our [official plugins repo](https://github.com/UsergeTeam/Userge-Plugins)
25 | to get familiar with the new structure.
26 | Also, you can check the [custom plugins repo](https://github.com/UsergeTeam/Custom-Plugins) to get a better idea.
27 |
28 | ## Custom Core Repo
29 |
30 | Set these env vars:
31 |
32 | * `CORE_REPO` - repo url (default | https://github.com/UsergeTeam/Userge)
33 | * `CORE_BRANCH` - branch name (default | master)
34 |
35 | ## [Docker Guide](https://github.com/UsergeTeam/Loader/blob/master/Docker.md)
36 |
--------------------------------------------------------------------------------
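
The `CORE_REPO` / `CORE_BRANCH` settings in the README above are plain environment variables that the loader reads at import time (see `loader/core/__init__.py` further down in this dump). The snippet below is only a minimal sketch of that lookup, with the fallback values assumed to match the defaults documented in the README:

```python
from os import environ

# Fallbacks here mirror the defaults documented in the README; the loader's
# own defaults live in loader/core/__init__.py.
core_repo = environ.get("CORE_REPO", "https://github.com/UsergeTeam/Userge")
core_branch = environ.get("CORE_BRANCH", "master")

print(f"core repo : {core_repo} (branch: {core_branch})")
```
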
/config.env.sample:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Copyright (C) 2020-2022 by UsergeTeam@Github, < https://github.com/UsergeTeam >.
4 | #
5 | # This file is part of < https://github.com/UsergeTeam/Userge > project,
6 | # and is released under the "GNU v3.0 License Agreement".
7 | # Please see < https://github.com/UsergeTeam/Userge/blob/master/LICENSE >
8 | #
9 | # All rights reserved.
10 |
11 |
12 | # ----------- REQUIRED ----------- #
13 |
14 |
15 | # Get them from https://my.telegram.org/
16 | API_ID=""
17 | API_HASH=""
18 |
19 |
20 | # Mongodb url from https://cloud.mongodb.com/
21 | # comment out the line below if you are going to use docker !
22 | DATABASE_URL=""
23 |
24 |
25 | # Telegram Log Channel ID
26 | LOG_CHANNEL_ID=""
27 |
28 |
29 | # choose one of the USERGE MODES
30 | # you can use userge in [ USER or BOT or DUAL ] MODE
31 | # see below for more info
32 |
33 |
34 | # ----------- OPTIONAL ----------- #
35 |
36 |
37 | # assert a single running Userge instance to prevent the AUTH_KEY_DUPLICATED error.
38 | ASSERT_SINGLE_INSTANCE=false
39 |
40 |
41 | # Custom pip packages to install while starting the bot, separated by spaces.
42 | CUSTOM_PIP_PACKAGES=""
43 |
44 |
45 | # Userbot Workers Count : Default = cpu_count + 4
46 | WORKERS=""
47 |
48 |
49 | # Set the name of your working directory
50 | DOWN_PATH="downloads/"
51 |
52 |
53 | # set command prefix
54 | CMD_TRIGGER="."
55 |
56 |
57 | # set command prefix for SUDO users
58 | SUDO_TRIGGER="!"
59 |
60 |
61 | # ----------- Only If Using Heroku ----------- #
62 |
63 |
64 | # get a Heroku API key from http://dashboard.heroku.com/account
65 | HEROKU_API_KEY=""
66 |
67 |
68 | # the name given to the heroku app
69 | HEROKU_APP_NAME=""
70 |
71 |
72 | # ----------- USERGE MODES ----------- #
73 |
74 |
75 | # >>> USER MODE <<< #
76 | # use userge as user
77 | # get this using [ '@genStr_Bot' or `bash genStr` ]
78 | SESSION_STRING=""
79 |
80 |
81 | # >>> BOT MODE <<< #
82 | # use userge as bot
83 | # get this from https://t.me/botfather if you'd like to use userge as a bot
84 | # and set your user id below
85 | BOT_TOKEN=""
86 | OWNER_ID=""
87 |
88 |
89 | # >>> DUAL MODE <<< #
90 | # use userge as both user and bot
91 | # fill all USER MODE and BOT MODE
92 |
--------------------------------------------------------------------------------
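
The sample above is an ordinary env file; the loader exports it into the process environment with python-dotenv before validating the values (see `loader/core/checks.py` below). A minimal sketch of that flow, assuming python-dotenv is installed and a filled-in `config.env` sits in the working directory:

```python
import os

from dotenv import load_dotenv  # python-dotenv, the same helper checks.py uses

# Export config.env into os.environ, then read a couple of the values back.
load_dotenv("config.env")

api_id = os.environ.get("API_ID")
log_channel_id = os.environ.get("LOG_CHANNEL_ID")

print("API_ID set     :", bool(api_id))
print("LOG_CHANNEL_ID :", log_channel_id)
```
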
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 |
3 | services:
4 | userge:
5 | build:
6 | context: https://github.com/UsergeTeam/Loader.git#master
7 | depends_on:
8 | - mongo
9 | restart: on-failure
10 | environment:
11 | DATABASE_URL: mongodb://root:example@mongo
12 | env_file:
13 | - config.env
14 |
15 | mongo:
16 | image: mongo
17 | volumes:
18 | - mongo_userge:/data/db
19 | environment:
20 | MONGO_INITDB_ROOT_USERNAME: root
21 | MONGO_INITDB_ROOT_PASSWORD: example
22 |
23 | volumes:
24 | mongo_userge:
--------------------------------------------------------------------------------
/genStr:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Copyright (C) 2020-2022 by UsergeTeam@Github, < https://github.com/UsergeTeam >.
4 | #
5 | # This file is part of < https://github.com/UsergeTeam/Userge > project,
6 | # and is released under the "GNU v3.0 License Agreement".
7 | # Please see < https://github.com/UsergeTeam/Userge/blob/master/LICENSE >
8 | #
9 | # All rights reserved.
10 |
11 | exec python3 tools/genstr.py
12 |
--------------------------------------------------------------------------------
/genStr.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 |
3 | start python tools\genstr.py
4 |
--------------------------------------------------------------------------------
/install_req:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | exec python3 -m pip install -r requirements.txt
4 |
--------------------------------------------------------------------------------
/install_req.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 |
3 | start pip install -r requirements.txt
4 |
--------------------------------------------------------------------------------
/loader/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "5.0"
2 |
--------------------------------------------------------------------------------
/loader/__main__.py:
--------------------------------------------------------------------------------
1 | from importlib import import_module
2 | from os import execl
3 | from sys import executable
4 |
5 |
6 | if __name__ == '__main__':
7 | try:
8 | getattr(import_module("loader.core.main"), 'load')()
9 | except InterruptedError:
10 | execl(executable, executable, '-m', 'loader')
11 | raise SystemExit
12 |
--------------------------------------------------------------------------------
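
The entry point above re-execs `python -m loader` whenever the loader raises `InterruptedError`. The real `load()` lives in `loader/core/main.py` (not shown in full here); the snippet below is only a hypothetical sketch of how a loader function could request a restart through that convention:

```python
# Hypothetical example only: a load() that asks loader/__main__.py to restart
# the process by raising InterruptedError once its work requests it.
def run_jobs() -> bool:
    # placeholder for the real loader work; return True to request a restart
    return False


def load() -> None:
    if run_jobs():
        # loader/__main__.py catches this and re-execs `python -m loader`
        raise InterruptedError


if __name__ == "__main__":
    load()
```
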
/loader/core/__init__.py:
--------------------------------------------------------------------------------
1 | from os import environ
2 |
3 | MIN_PY = "3.8"
4 | MAX_PY = "3.10"
5 |
6 | CONF_PATH = "config.env"
7 |
8 | # will be removed after pluggable utils and res
9 | CORE_REPO = environ.get('CORE_REPO', "https://github.com/UsergeTeam/Userge")
10 | CORE_BRANCH = environ.get('CORE_BRANCH', "beta")
11 |
--------------------------------------------------------------------------------
/loader/core/checks.py:
--------------------------------------------------------------------------------
1 | __all__ = ['do_checks']
2 |
3 | import atexit
4 | import json
5 | import os
6 | import sys
7 | from base64 import urlsafe_b64decode
8 | from os.path import exists, isfile
9 | from shutil import which
10 | from struct import unpack, error as struct_error
11 |
12 | from dotenv import load_dotenv
13 | from pymongo import MongoClient
14 |
15 | from . import MIN_PY, MAX_PY, CONF_PATH
16 | from .types import Database
17 | from .utils import log, error, open_url, assert_read, assert_read_write
18 |
19 | atexit.register(lambda _: exists(_) and assert_read_write(_), CONF_PATH)
20 |
21 |
22 | def _git() -> None:
23 | log("Checking Git ...")
24 |
25 | if not which("git"):
26 | error("Required git !", "install git")
27 |
28 |
29 | def _py_version() -> None:
30 | log("Checking Python Version ...")
31 |
32 | py_ver = tuple(sys.version_info[:2])
33 | min_py = tuple(map(int, MIN_PY.split(".")))
34 | max_py = tuple(map(int, MAX_PY.split(".")))
35 |
36 | if py_ver < min_py:
37 | error(f"You MUST have a python version of at least {MIN_PY}.0 !",
38 | "upgrade your python version")
39 |
40 | if py_ver > max_py:
41 | error(f"You MUST have a python version of at most {MAX_PY}.X !",
42 | "downgrade your python version")
43 |
44 | log(f"\tFound PYTHON - v{'.'.join(map(str, sys.version_info[:3]))} ...")
45 |
46 |
47 | def _config_file() -> None:
48 | log("Checking Config File ...")
49 |
50 | if isfile(CONF_PATH):
51 | log(f"\tConfig file found : {CONF_PATH}, Exporting ...")
52 |
53 | assert_read(CONF_PATH)
54 | load_dotenv(CONF_PATH)
55 |
56 |
57 | def _vars() -> None:
58 | log("Checking ENV Vars ...")
59 |
60 | env = os.environ
61 |
62 | string = env.get('SESSION_STRING')
63 |
64 | if env.get('HU_STRING_SESSION') and not string:
65 | error("Deprecated HU_STRING_SESSION var !", "its SESSION_STRING now")
66 |
67 | for _ in ('API_ID', 'API_HASH', 'DATABASE_URL', 'LOG_CHANNEL_ID'):
68 | val = env.get(_)
69 |
70 | if not val:
71 | error(f"Required {_} var !")
72 |
73 | log_channel = env.get('LOG_CHANNEL_ID')
74 |
75 | if not log_channel.startswith("-100") or not log_channel[1:].isnumeric():
76 | error(f"Invalid LOG_CHANNEL_ID {log_channel} !", "it should startswith -100")
77 |
78 | bot_token = env.get('BOT_TOKEN')
79 |
80 | if not string and not bot_token:
81 | error("Required SESSION_STRING or BOT_TOKEN var !")
82 |
83 | if string:
84 | if len(string) == 351:
85 | str_fmt = ">B?256sI?"
86 | elif len(string) == 356:
87 | str_fmt = ">B?256sQ?"
88 | else:
89 | str_fmt = ">BI?256sQ?"
90 |
91 | try:
92 | unpack(str_fmt, urlsafe_b64decode(string + "=" * (-len(string) % 4)))
93 | except struct_error:
94 | error("Invalid SESSION_STRING var !", "need a pyrogram session string")
95 |
96 | if bot_token:
97 | if ':' not in bot_token:
98 | error("Invalid BOT_TOKEN var !", "get it from @botfather")
99 |
100 | if not env.get('OWNER_ID'):
101 | error("Required OWNER_ID var !", "set your id to this")
102 |
103 | _var_data = dict(
104 | DOWN_PATH="downloads",
105 | ASSERT_SINGLE_INSTANCE="false",
106 | CMD_TRIGGER='.',
107 | SUDO_TRIGGER='!',
108 | FINISHED_PROGRESS_STR='█',
109 | UNFINISHED_PROGRESS_STR='░'
110 | )
111 | for k, v in _var_data.items():
112 | env.setdefault(k, v)
113 |
114 | workers = int(env.get('WORKERS') or 0)
115 | env['WORKERS'] = str(min(16, max(workers, 0) or os.cpu_count() + 4, os.cpu_count() + 4))
116 | env['MOTOR_MAX_WORKERS'] = env['WORKERS']
117 |
118 | down_path = env['DOWN_PATH']
119 | env['DOWN_PATH'] = down_path.rstrip('/') + '/'
120 |
121 | cmd_trigger = env['CMD_TRIGGER']
122 | sudo_trigger = env['SUDO_TRIGGER']
123 |
124 | if len(cmd_trigger) != 1 or len(sudo_trigger) != 1:
125 | error(f"CMD_TRIGGER ({cmd_trigger}) or SUDO_TRIGGER ({sudo_trigger}) is too long !",
126 | "trigger should be a single character")
127 |
128 | if cmd_trigger == sudo_trigger:
129 | error(f"Invalid SUDO_TRIGGER ! You can't use {cmd_trigger} as SUDO_TRIGGER",
130 | "use different triggers for cmd and sudo")
131 |
132 | if cmd_trigger == '/' or sudo_trigger == '/':
133 | error("You can't use / as CMD_TRIGGER or SUDO_TRIGGER", "try a different one")
134 |
135 | h_api = 'HEROKU_API_KEY'
136 | h_app = 'HEROKU_APP_NAME'
137 |
138 | if not env.get('DYNO'):
139 | for _ in (h_api, h_app):
140 | if _ in env:
141 | env.pop(_)
142 |
143 | h_api = env.get(h_api)
144 | h_app = env.get(h_app)
145 |
146 | if h_api and not h_app or not h_api and h_app:
147 | error("Need both HEROKU_API_KEY and HEROKU_APP_NAME vars !")
148 |
149 | if h_api and h_app:
150 | if len(h_api) != 36 or len(h_api.split('-')) != 5:
151 | error(f"Invalid HEROKU_API_KEY ({h_api}) !")
152 |
153 | headers = {
154 | 'Accept': "application/vnd.heroku+json; version=3",
155 | 'Authorization': f"Bearer {h_api}"
156 | }
157 |
158 | r, e = open_url("https://api.heroku.com/account/rate-limits", headers)
159 | if e:
160 | error(f"Invalid HEROKU_API_KEY, {r} > {e}")
161 |
162 | r, e = open_url(f"https://api.heroku.com/apps/{h_app}", headers)
163 | if e:
164 | error(f"Couldn't find heroku app ({h_app}), {r} > {e}",
165 | "either the name is invalid or the api key is from a different account")
166 |
167 | if Database.is_none():
168 | db_url = env.get('DATABASE_URL')
169 |
170 | try:
171 | new_url = Database.fix_url(db_url)
172 | except (ValueError, AttributeError):
173 | error(f"Invalid DATABASE_URL > ({db_url}) !")
174 | return
175 |
176 | if new_url != db_url:
177 | env['DATABASE_URL'] = new_url
178 |
179 | cl = MongoClient(new_url, maxPoolSize=1, minPoolSize=0)
180 |
181 | try:
182 | cl.list_database_names()
183 | except Exception as e:
184 | error(f"Invalid DATABASE_URL > {str(e)}")
185 |
186 | Database.set(cl)
187 |
188 | if bot_token:
189 | api_url = "https://api.telegram.org/bot" + bot_token
190 |
191 | e = open_url(api_url + "/getMe")[1]
192 |
193 | if e:
194 | error("Invalid BOT_TOKEN var !", "get or revoke it from @botfather")
195 |
196 | r, e = open_url(api_url + "/getChat?chat_id=" + log_channel)
197 |
198 | if e:
199 | if r == 400:
200 | error(f"Invalid LOG_CHANNEL_ID ({log_channel}) !",
201 | "add your bot to the log chat if this value is correct")
202 |
203 | if r == 403:
204 | error("Bot not found in log chat !", "add bot to your log chat as admin")
205 |
206 | error(f"Unknown error [getChat] ({r}) {e} !", "ask @usergeot")
207 |
208 | result = json.loads(r.read())['result']
209 |
210 | chat_type = result.get('type')
211 | chat_username = result.get('username')
212 |
213 | if chat_type not in ('supergroup', 'channel'):
214 | error(f"Invalid log chat type ({chat_type}) !",
215 | "only supergroups and channels are supported")
216 |
217 | if chat_username:
218 | error(f"Can't use a public log chat (@{chat_username}) !", "make it private")
219 |
220 | for _ in (down_path, '.rcache'):
221 | os.makedirs(_, exist_ok=True)
222 |
223 |
224 | def do_checks() -> None:
225 | _git()
226 | _py_version()
227 | _config_file()
228 | _vars()
229 |
--------------------------------------------------------------------------------
/loader/core/main.py:
--------------------------------------------------------------------------------
1 | __all__ = ['load']
2 |
3 | import os
4 | import sys
5 | from contextlib import suppress
6 | from multiprocessing import Process, Pipe, set_start_method
7 | from shutil import which
8 | from signal import signal, SIGINT, SIGTERM, SIGABRT
9 | from typing import Set
10 |
11 | from .checks import do_checks
12 | from .menu import main_menu
13 | from .methods import fetch_core, fetch_repos
14 | from .types import Repos, Constraints, Sig, Requirements, Session, Tasks
15 | from .utils import log, error, call, get_client_type, safe_url, grab_conflicts, clean_core, \
16 | clean_plugins, print_logo
17 | from .. import __version__
18 | from ..userge.main import run
19 |
20 |
21 | def load_data() -> None:
22 | log("Loading Data ...")
23 |
24 | Repos.load()
25 | Constraints.load()
26 |
27 |
28 | def init_core() -> None:
29 | log("Fetching Core ...")
30 |
31 | fetch_core()
32 | if Sig.core_exists():
33 | return
34 |
35 | log("Initializing Core ...")
36 |
37 | core = Repos.get_core()
38 | if core.failed:
39 | code, err = core.error
40 | error(f"error code: [{code}]\n{err}")
41 |
42 | core.checkout_version()
43 |
44 | loader_version = core.grab_loader_version()
45 |
46 | if loader_version:
47 | if __version__ < loader_version:
48 | log("\tUpdating loader to latest ...")
49 |
50 | code, err = call("git", "pull")
51 | if code:
52 | error(f"error code: [{code}]\n{err}")
53 |
54 | raise InterruptedError
55 |
56 | Requirements.update(core.grab_req())
57 |
58 | clean_core()
59 | core.copy()
60 |
61 | core.checkout_branch()
62 |
63 | Sig.repos_remove()
64 | Sig.core_make()
65 |
66 |
67 | def init_repos() -> None:
68 | log("Fetching Repos ...")
69 |
70 | fetch_repos()
71 | if not Repos.has_repos() or Sig.repos_exists():
72 | return
73 |
74 | log("Initializing Repos ...")
75 |
76 | repos = 0
77 | plugins = {}
78 | core_version = Repos.get_core().info.count
79 | client_type = get_client_type()
80 | os_type = dict(posix='linux', nt='windows').get(os.name, os.name)
81 |
82 | for repo in Repos.iter_repos():
83 | if repo.failed:
84 | code, err = repo.error
85 | log(f"\tSkipping: {safe_url(repo.info.url)} code: [{code}] due to: {err}")
86 | continue
87 |
88 | repo.checkout_version()
89 | repo.load_plugins()
90 |
91 | unique = 0
92 | ignored = 0
93 | overridden = 0
94 |
95 | for plg in repo.iter_plugins():
96 | conf = plg.config
97 | reason = None
98 |
99 | for _ in ' ':  # single-pass loop so the checks below can break out early
100 | if not conf.available:
101 | reason = "not available"
102 | break
103 |
104 | constraint = Constraints.match(plg)
105 | if constraint:
106 | reason = f"constraint {constraint}"
107 | break
108 |
109 | if conf.os and conf.os != os_type:
110 | reason = f"incompatible os type {os_type}, required: {conf.os}"
111 | break
112 |
113 | if conf.min_core and conf.min_core > core_version:
114 | reason = (f"min core version {conf.min_core} is required, "
115 | f"current: {core_version}")
116 | break
117 |
118 | if conf.max_core and conf.max_core < core_version:
119 | reason = (f"max core version {conf.max_core} is required, "
120 | f"current: {core_version}")
121 | break
122 |
123 | if (
124 | conf.client_type
125 | and client_type != "dual"
126 | and conf.client_type.lower() != client_type
127 | ):
128 | c_type = conf.client_type.lower()
129 | reason = f"client type {c_type} is required, current: {client_type}"
130 | break
131 |
132 | if conf.envs:
133 | for env in conf.envs:
134 | if '|' in env:
135 | parts = tuple(filter(None, map(str.strip, env.split('|'))))
136 |
137 | for part in parts:
138 | if os.environ.get(part):
139 | break
140 | else:
141 | reason = f"one of envs {', '.join(parts)} is required"
142 | break
143 | else:
144 | if not os.environ.get(env):
145 | reason = f"env {env} is required"
146 | break
147 |
148 | if reason:
149 | break
150 |
151 | if conf.bins:
152 | for bin_ in conf.bins:
153 | if not which(bin_):
154 | reason = f"bin {bin_} is required"
155 | break
156 |
157 | if reason:
158 | break
159 |
160 | old = plugins.get(plg.name)
161 | plugins[plg.name] = plg
162 |
163 | if old:
164 | overridden += 1
165 | log(f"\tPlugin: [{plg.cat}/{plg.name}] "
166 | f"is overriding Repo: {safe_url(old.repo_url)}")
167 | else:
168 | unique += 1
169 |
170 | else:  # no break above, so this plugin passed every check
171 | continue
172 |
173 | ignored += 1
174 | log(f"\tPlugin: [{plg.cat}/{plg.name}] was ignored due to: {reason}")
175 |
176 | repos += 1
177 | log(f"\t\tRepo: {safe_url(repo.info.url)} "
178 | f"ignored: {ignored} overridden: {overridden} unique: {unique}")
179 |
180 | if plugins:
181 |
182 | for c_plg in Repos.get_core().get_plugins():
183 | if c_plg in plugins:
184 | plg = plugins.pop(c_plg)
185 |
186 | log(f"\tPlugin: [{plg.cat}/{plg.name}] was removed due to: "
187 | "matching builtin found")
188 |
189 | def resolve_depends() -> None:
190 | all_ok = False
191 |
192 | while plugins and not all_ok:
193 | all_ok = True
194 |
195 | for plg_ in tuple(plugins.values()):
196 | deps = plg_.config.depends
197 | if not deps:
198 | continue
199 |
200 | for dep in deps:
201 | if dep not in plugins:
202 | all_ok = False
203 | del plugins[plg_.name]
204 |
205 | log(f"\tPlugin: [{plg_.cat}/{plg_.name}] was removed due to: "
206 | f"plugin [{dep}] not found")
207 |
208 | break
209 |
210 | def grab_requirements() -> Set[str]:
211 | data = set()
212 |
213 | for plg_ in plugins.values():
214 | packages_ = plg_.config.packages
215 | if packages_:
216 | data.update(packages_)
217 |
218 | return data
219 |
220 | resolve_depends()
221 | requirements = grab_requirements()
222 |
223 | if requirements:
224 | conflicts = grab_conflicts(requirements)
225 |
226 | if conflicts:
227 | for conflict in conflicts:
228 | for plg in tuple(plugins.values()):
229 | packages = plg.config.packages
230 |
231 | if packages and conflict in packages:
232 | del plugins[plg.name]
233 |
234 | log(f"\tPlugin: [{plg.cat}/{plg.name}] was removed due to: "
235 | f"conflicting requirement [{conflict}] found")
236 |
237 | resolve_depends()
238 | requirements = grab_requirements()
239 |
240 | Requirements.update(requirements)
241 |
242 | clean_plugins()
243 |
244 | for plg in plugins.values():
245 | plg.copy()
246 |
247 | log(f"\tTotal plugins: {len(plugins)} from repos: {repos}")
248 |
249 | for repo in Repos.iter_repos():
250 | repo.checkout_branch()
251 |
252 | Sig.repos_make()
253 |
254 |
255 | def install_req() -> None:
256 | pip = os.environ.get('CUSTOM_PIP_PACKAGES')
257 | if pip:
258 | Requirements.update(pip.split())
259 |
260 | size = Requirements.size()
261 | if size > 0:
262 | log(f"Installing Requirements ({size}) ...")
263 |
264 | code, err = Requirements.install()
265 | if code:
266 | error(f"error code: [{code}]\n{err}", interrupt=False)
267 |
268 | Sig.repos_remove()
269 |
270 |
271 | def check_args() -> None:
272 | if len(sys.argv) > 1 and sys.argv[1].lower() == "menu":
273 | main_menu()
274 |
275 |
276 | def run_loader() -> None:
277 | load_data()
278 | init_core()
279 | init_repos()
280 | install_req()
281 |
282 |
283 | def initialize() -> None:
284 | try:
285 | print_logo()
286 | do_checks()
287 | check_args()
288 | run_loader()
289 | except InterruptedError:
290 | raise
291 | except Exception as e:
292 | error(str(e))
293 |
294 |
295 | def run_userge() -> None:
296 | log("Starting Userge ...")
297 |
298 | p_p, c_p = Pipe()
299 | p = Process(name="userge", target=run, args=(c_p,))
300 | Session.set_process(p)
301 |
302 | def handle(*_):
303 | p_p.close()
304 | Session.terminate()
305 |
306 | for _ in (SIGINT, SIGTERM, SIGABRT):
307 | signal(_, handle)
308 |
309 | p.start()
310 | c_p.close()
311 |
312 | with suppress(EOFError, OSError):
313 | while p.is_alive() and not p_p.closed:
314 | p_p.send(Tasks.handle(*p_p.recv()))
315 |
316 | p_p.close()
317 | p.join()
318 | p.close()
319 |
320 |
321 | def _load() -> None:
322 | if Session.should_init():
323 | initialize()
324 |
325 | run_userge()
326 | if Session.should_restart():
327 | _load()
328 |
329 |
330 | def load() -> None:
331 | log(f"Loader v{__version__}")
332 | set_start_method('spawn')
333 |
334 | with suppress(KeyboardInterrupt):
335 | _load()
336 |
337 | raise SystemExit
338 |
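A note on the pipe protocol above: the parent loop in run_userge() answers every
(job_id, *args) tuple it receives with the return value of Tasks.handle(). The child-side
code lives in loader/userge/connection.py (not reproduced here), so the sketch below only
illustrates the exchange implied by that loop; names are illustrative.

    from multiprocessing.connection import Connection

    from loader import job


    def request(conn: Connection, job_id: int, *args):
        # the parent unpacks this tuple into Tasks.handle(job_id, *args) ...
        conn.send((job_id, *args))
        # ... and replies with whatever the registered handler returned
        result = conn.recv()
        # Tasks.handle() returns exceptions instead of raising them, so re-raise here
        if isinstance(result, Exception):
            raise result
        return result

    # e.g. request(conn, job.GET_REPOS) would return the list built by get_repos()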
--------------------------------------------------------------------------------
/loader/core/menu.py:
--------------------------------------------------------------------------------
1 | __all__ = ['main_menu']
2 |
3 | import os
4 | from time import sleep
5 |
6 | from .types import Repos, Sig, Cache
7 |
8 |
9 | def _clear():
10 | os.system('cls' if os.name == 'nt' else 'clear')
11 |
12 |
13 | def _print(out) -> None:
14 | print(f"{out} !!!")
15 | sleep(2)
16 |
17 |
18 | def _invalid(val):
19 | return input(f"invalid input {val}: ")
20 |
21 |
22 | def _delete_repos() -> None:
23 | _clear()
24 |
25 | Repos.load()
26 |
27 | out = ""
28 |
29 | for repo in Repos.iter_repos():
30 | out += f"{repo.info.id}. {repo.info.url}\n"
31 |
32 | code = input(f"""Menu > settings > repos > delete
33 | 0. back
34 | {out.strip()}
35 | : """).strip()
36 |
37 | while True:
38 | if code == '0':
39 | _repos()
40 |
41 | else:
42 | if code.isnumeric() and Repos.get(int(code)):
43 | Repos.remove(int(code))
44 |
45 | _delete_repos()
46 |
47 | else:
48 | code = _invalid(code)
49 | continue
50 |
51 | break
52 |
53 |
54 | def _core() -> None:
55 | _clear()
56 |
57 | code = input("""Menu > settings > core
58 | 0. back
59 | 1. reset
60 | 2. invalidate cache
61 | 3. clear cache
62 | 4. menu
63 | : """).strip()
64 |
65 | while True:
66 | if code == '0':
67 | _settings()
68 |
69 | elif code == '1':
70 | Repos.load()
71 |
72 | core = Repos.get_core()
73 | if core:
74 | core.reset()
75 |
76 | _print("reset core")
77 | _core()
78 |
79 | elif code == '2' or code == '3':
80 | Sig.core_remove()
81 |
82 | if code == '2':
83 | _print("invalidated core cache")
84 |
85 | else:
86 | Cache.core_remove()
87 |
88 | _print("cleared core cache")
89 |
90 | _core()
91 |
92 | elif code == '4':
93 | main_menu()
94 |
95 | else:
96 | code = _invalid(code)
97 | continue
98 |
99 | break
100 |
101 |
102 | def _repos() -> None:
103 | _clear()
104 |
105 | code = input("""Menu > settings > repos
106 | 0. back
107 | 1. delete
108 | 2. invalidate cache
109 | 3. clear cache
110 | 4. menu
111 | : """).strip()
112 |
113 | while True:
114 | if code == '0':
115 | _settings()
116 |
117 | elif code == '1':
118 | _delete_repos()
119 |
120 | elif code == '2' or code == '3':
121 | Sig.repos_remove()
122 |
123 | if code == '2':
124 | _print("invalidated repos cache")
125 |
126 | else:
127 | Cache.repos_remove()
128 |
129 | _print("cleared repos cache")
130 |
131 | _repos()
132 |
133 | elif code == '4':
134 | main_menu()
135 |
136 | else:
137 | code = _invalid(code)
138 | continue
139 |
140 | break
141 |
142 |
143 | def _settings() -> None:
144 | _clear()
145 |
146 | code = input("""Menu > settings
147 | 0. back
148 | 1. core
149 | 2. repos
150 | 3. invalidate cache
151 | 4. clear cache
152 | : """).strip()
153 |
154 | while True:
155 | if code == '0':
156 | main_menu()
157 |
158 | elif code == '1':
159 | _core()
160 |
161 | elif code == '2':
162 | _repos()
163 |
164 | elif code == '3' or code == '4':
165 | Sig.core_remove()
166 | Sig.repos_remove()
167 |
168 | if code == '3':
169 | _print("invalidated cache")
170 |
171 | else:
172 | Cache.core_remove()
173 | Cache.repos_remove()
174 |
175 | _print("cleared cache")
176 |
177 | _settings()
178 |
179 | else:
180 | code = _invalid(code)
181 | continue
182 |
183 | break
184 |
185 |
186 | def main_menu() -> None:
187 | _clear()
188 |
189 | code = input("""Menu
190 | 1. start
191 | 2. settings
192 | 3. exit
193 | : """).strip()
194 |
195 | while True:
196 | if code == '1':
197 | _clear()
198 |
199 | elif code == '2':
200 | _settings()
201 |
202 | elif code == '3':
203 | _clear()
204 | raise KeyboardInterrupt
205 |
206 | else:
207 | code = _invalid(code)
208 | continue
209 |
210 | break
211 |
--------------------------------------------------------------------------------
/loader/core/methods.py:
--------------------------------------------------------------------------------
1 | __all__ = ['fetch_core', 'fetch_repos']
2 |
3 | from contextlib import suppress
4 | from os import environ
5 | from typing import List, Optional, Callable, Union
6 |
7 | from dotenv import set_key, unset_key
8 |
9 | from . import CONF_PATH
10 | from .types import Tasks, Session, Repos, Constraints, Sig
11 | from .utils import error, safe_repo_info
12 | from .. import job
13 | from ..types import RepoInfo, Update, Constraint
14 |
15 |
16 | def on(work: int) -> Callable[[Callable], Callable]:
17 | def wrapper(func: Callable) -> Callable:
18 | Tasks.add(work, func)
19 | return func
20 |
21 | return wrapper
22 |
23 |
24 | @on(job.SOFT_RESTART)
25 | def restart_soft() -> None:
26 | Session.restart(False)
27 |
28 |
29 | @on(job.HARD_RESTART)
30 | def restart_hard() -> None:
31 | Session.restart(True)
32 |
33 |
34 | @on(job.FETCH_CORE)
35 | def fetch_core() -> None:
36 | core = Repos.get_core()
37 | if not core:
38 | error("Core Not Found !")
39 |
40 | core.init()
41 | core.fetch()
42 |
43 |
44 | @on(job.FETCH_REPO)
45 | def fetch_repo(repo_id: int) -> None:
46 | repo = Repos.get(repo_id)
47 | if repo:
48 | repo.init()
49 | repo.fetch()
50 |
51 |
52 | @on(job.FETCH_REPOS)
53 | def fetch_repos() -> None:
54 | for repo in Repos.iter_repos():
55 | repo.init()
56 | repo.fetch()
57 |
58 |
59 | @on(job.GET_CORE)
60 | def get_core() -> Optional[RepoInfo]:
61 | core = Repos.get_core()
62 | if core:
63 | return core.info
64 |
65 |
66 | @on(job.GET_REPO)
67 | def get_repo(repo_id: int) -> Optional[RepoInfo]:
68 | repo = Repos.get(repo_id)
69 | if repo:
70 | return safe_repo_info(repo.info)
71 |
72 |
73 | @on(job.GET_REPOS)
74 | def get_repos() -> List[RepoInfo]:
75 | data = []
76 |
77 | for repo in Repos.iter_repos():
78 | data.append(safe_repo_info(repo.info))
79 |
80 | return data
81 |
82 |
83 | @on(job.ADD_REPO)
84 | def add_repo(priority: int, branch: str, url: str) -> bool:
85 | return Repos.add(priority, branch, url)
86 |
87 |
88 | @on(job.REMOVE_REPO)
89 | def remove_repo(repo_id: int) -> bool:
90 | return Repos.remove(repo_id)
91 |
92 |
93 | @on(job.GET_CORE_NEW_COMMITS)
94 | def get_core_new_commits() -> Optional[List[Update]]:
95 | core = Repos.get_core()
96 | if core:
97 | return core.new_commits()
98 |
99 |
100 | @on(job.GET_CORE_OLD_COMMITS)
101 | def get_core_old_commits(limit: int) -> Optional[List[Update]]:
102 | core = Repos.get_core()
103 | if core:
104 | return core.old_commits(limit)
105 |
106 |
107 | @on(job.GET_REPO_NEW_COMMITS)
108 | def get_repo_new_commits(repo_id: int) -> Optional[List[Update]]:
109 | repo = Repos.get(repo_id)
110 | if repo:
111 | return repo.new_commits()
112 |
113 |
114 | @on(job.GET_REPO_OLD_COMMITS)
115 | def get_repo_old_commits(repo_id: int, limit: int) -> Optional[List[Update]]:
116 | repo = Repos.get(repo_id)
117 | if repo:
118 | return repo.old_commits(limit)
119 |
120 |
121 | @on(job.EDIT_CORE)
122 | def edit_core(branch: Optional[str], version: Optional[Union[int, str]]) -> bool:
123 | core = Repos.get_core()
124 | if core:
125 | return core.edit(branch, version)
126 |
127 | return False
128 |
129 |
130 | @on(job.EDIT_REPO)
131 | def edit_repo(repo_id: int, branch: Optional[str], version: Optional[Union[int, str]],
132 | priority: Optional[int]) -> bool:
133 | repo = Repos.get(repo_id)
134 | if repo:
135 | return repo.edit(branch, version, priority)
136 |
137 | return False
138 |
139 |
140 | @on(job.ADD_CONSTRAINTS)
141 | def add_constraints(c_type: str, data: List[str]) -> bool:
142 | return Constraints.add(c_type, data)
143 |
144 |
145 | @on(job.REMOVE_CONSTRAINTS)
146 | def remove_constraints(c_type: Optional[str], data: List[str]) -> bool:
147 | return Constraints.remove(c_type, data)
148 |
149 |
150 | @on(job.GET_CONSTRAINTS)
151 | def get_constraints() -> List[Constraint]:
152 | return Constraints.get()
153 |
154 |
155 | @on(job.CLEAR_CONSTRAINTS)
156 | def clear_constraints(c_type: Optional[str]) -> bool:
157 | return Constraints.clear(c_type)
158 |
159 |
160 | @on(job.INVALIDATE_REPOS_CACHE)
161 | def invalidate_repos_cache() -> None:
162 | Sig.repos_remove()
163 |
164 |
165 | @on(job.SET_ENV)
166 | def set_env(key: str, value: str) -> None:
167 | set_key(CONF_PATH, key, value)
168 | if key not in environ:
169 | Sig.repos_remove()
170 |
171 | environ[key] = value
172 |
173 |
174 | @on(job.UNSET_ENV)
175 | def unset_env(key: str) -> None:
176 | unset_key(CONF_PATH, key)
177 | with suppress(KeyError):
178 | del environ[key]
179 | Sig.repos_remove()
180 |
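Importing this module is what registers the handlers: each @on(job_id) call stores the
function in Tasks, keyed by its job id from loader/job.py. A small dispatch sketch, meant
to be run inside the project environment (999 is just an id that is never registered):

    from loader import job
    from loader.core import methods  # noqa: F401  importing registers the handlers above
    from loader.core.types import Tasks

    constraints = Tasks.handle(job.GET_CONSTRAINTS)   # calls get_constraints() above
    unknown = Tasks.handle(999)                       # unknown ids come back as a KeyError
    assert isinstance(unknown, KeyError)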
--------------------------------------------------------------------------------
/loader/core/types.py:
--------------------------------------------------------------------------------
1 | __all__ = ['Database', 'Repos', 'Constraints', 'Sig', 'Cache', 'Requirements', 'Session', 'Tasks']
2 |
3 | import os
4 | import re
5 | import sys
6 | from configparser import ConfigParser, SectionProxy
7 | from contextlib import suppress
8 | from itertools import count
9 | from multiprocessing import Process
10 | from os.path import isdir, join, exists, isfile
11 | from shutil import copytree
12 | from typing import Set, Iterable, Dict, Union, Optional, List, Callable, Tuple, Iterator
13 | from urllib.parse import quote_plus
14 |
15 | from git import Repo as GitRepo, Commit, InvalidGitRepositoryError, GitCommandError
16 | from gitdb.exc import BadName
17 | from pymongo import MongoClient
18 | from pymongo.collection import Collection
19 |
20 | from . import CORE_REPO, CORE_BRANCH, CONF_PATH
21 | from .utils import error, terminate, call, safe_url, remove, rmtree, assert_write
22 | from ..types import RepoInfo, Update, Constraint
23 |
24 | _CACHE_PATH = ".rcache"
25 |
26 |
27 | class Database:
28 | _instance = None
29 |
30 | @classmethod
31 | def is_none(cls) -> bool:
32 | return cls._instance is None
33 |
34 | @classmethod
35 | def get(cls) -> 'Database':
36 | if not cls._instance:
37 | error("Database not initialized !")
38 | return cls._instance
39 |
40 | @classmethod
41 | def set(cls, client: MongoClient) -> None:
42 | if not cls._instance:
43 | cls._instance = cls.parse(client)
44 |
45 | _RE_UP = re.compile(r"(?<=//)(.+)(?=@\w+)")
46 |
47 | @classmethod
48 | def fix_url(cls, url: str) -> str:
49 | u_and_p = cls._RE_UP.search(url).group(1)
50 | name, pwd = u_and_p.split(':')
51 | escaped = quote_plus(name) + ':' + quote_plus(pwd)
52 | return url.replace(u_and_p, escaped)
53 |
54 | def __init__(self, config: Collection, repos: Collection, constraint: Collection):
55 | self._config = config
56 | self._repos = repos
57 | self._constraint = constraint
58 |
59 | @classmethod
60 | def parse(cls, client: MongoClient) -> 'Database':
61 | db = client["Loader"]
62 |
63 | config = db["config"]
64 | repos = db["repos"]
65 | constraint = db["constraint"]
66 |
67 | return cls(config, repos, constraint)
68 |
69 | @property
70 | def config(self) -> Collection:
71 | return self._config
72 |
73 | @property
74 | def repos(self) -> Collection:
75 | return self._repos
76 |
77 | @property
78 | def constraint(self) -> Collection:
79 | return self._constraint
80 |
81 |
82 | class _Parser:
83 | def __init__(self, section: SectionProxy):
84 | self._section = section
85 |
86 | @classmethod
87 | def parse(cls, path: str) -> '_Parser':
88 | parser = ConfigParser()
89 | parser.read(path)
90 | section = parser[parser.default_section]
91 |
92 | return cls(section)
93 |
94 | def get(self, key: str) -> Optional[str]:
95 | with suppress(KeyError):
96 | return self._section.get(key)
97 |
98 | def getint(self, key: str) -> Optional[int]:
99 | with suppress(KeyError, ValueError):
100 | return self._section.getint(key)
101 |
102 | def getboolean(self, key: str) -> Optional[bool]:
103 | with suppress(KeyError, ValueError):
104 | return self._section.getboolean(key)
105 |
106 | def getset(self, key: str, lower=False) -> Optional[Set[str]]:
107 | value = self.get(key)
108 | if value:
109 | return set(filter(None, map(
110 | lambda _: _.strip().lower() if lower else _.strip(), value.split(','))))
111 |
112 |
113 | class _Config:
114 | def __init__(self, available: Optional[bool], os_type: Optional[str],
115 | min_core: Optional[int], max_core: Optional[int], client_type: Optional[str],
116 | envs: Optional[Set[str]], bins: Optional[Set[str]],
117 | depends: Optional[Set[str]], packages: Optional[Set[str]]):
118 | self.available = available
119 | self.os = os_type
120 | self.min_core = min_core
121 | self.max_core = max_core
122 | self.client_type = client_type
123 | self.envs = envs
124 | self.bins = bins
125 | self.depends = depends
126 | self.packages = packages
127 |
128 | @classmethod
129 | def parse(cls, path: str) -> '_Config':
130 | parser = _Parser.parse(path)
131 |
132 | available = parser.getboolean('available')
133 | os_type = parser.get('os')
134 | min_core = parser.getint('min_core')
135 | max_core = parser.getint('max_core')
136 | client_type = parser.get('client_type')
137 | envs = parser.getset('envs')
138 | bins = parser.getset('bins')
139 | depends = parser.getset('depends', True)
140 | packages = parser.getset('packages', True)
141 |
142 | return cls(available, os_type, min_core, max_core,
143 | client_type, envs, bins, depends, packages)
144 |
145 |
146 | class _Plugin:
147 | def __init__(self, path: str, cat: str, name: str,
148 | config: _Config, repo_name: str, repo_url: str):
149 | self.path = path
150 | self.cat = cat
151 | self.name = name
152 | self.config = config
153 | self.repo_name = repo_name
154 | self.repo_url = repo_url
155 |
156 | @classmethod
157 | def parse(cls, path: str, cat: str, name: str, repo: RepoInfo) -> '_Plugin':
158 | config = _Config.parse(join(path, "config.ini"))
159 |
160 | return cls(path, cat, name, config, repo.name, repo.url)
161 |
162 | def copy(self) -> None:
163 | copytree(self.path, join("userge", "plugins", self.cat, self.name))
164 |
165 |
166 | class _BaseRepo:
167 | def __init__(self, info: RepoInfo, path: str):
168 | self.info = info
169 | self._path = path
170 | self._git: Optional[GitRepo] = None
171 | self._error_code = 0
172 | self._stderr = ""
173 |
174 | @property
175 | def failed(self):
176 | return self._git is None
177 |
178 | @property
179 | def error(self) -> Tuple[int, str]:
180 | return self._error_code, self._stderr
181 |
182 | def init(self) -> None:
183 | if self._git:
184 | return
185 |
186 | if exists(self._path):
187 | try:
188 | self._git = GitRepo(self._path)
189 | except InvalidGitRepositoryError:
190 | self.delete()
191 |
192 | if not self._git:
193 | try:
194 | self._git = GitRepo.clone_from(self.info.url, self._path)
195 | except GitCommandError as e:
196 | self._error_code = e.status
197 | self._stderr = (e.stderr or 'null').strip()
198 |
199 | def _branch_exists(self, branch: str) -> bool:
200 | return branch and self._git and branch in self._git.heads
201 |
202 | def _get_commit(self, version: Optional[Union[int, str]] = None) -> Optional[Commit]:
203 | if version is None:
204 | version = self.info.version
205 |
206 | if self._git:
207 | if isinstance(version, int) or version.isnumeric():
208 | commit = self._git.commit(self.info.branch)
209 |
210 | input_count = int(version)
211 | head_count = commit.count()
212 |
213 | if input_count == head_count:
214 | return commit
215 |
216 | if input_count < head_count:
217 | skip = head_count - input_count
218 | data = list(self._git.iter_commits(self.info.branch, max_count=1, skip=skip))
219 |
220 | if data:
221 | return data[0]
222 |
223 | elif isinstance(version, str) and version:
224 | with suppress(BadName, ValueError):
225 | return self._git.commit(version)
226 |
227 | def fetch(self) -> None:
228 | if self.failed:
229 | return
230 |
231 | _branches = set()
232 |
233 | try:
234 | for info in self._git.remote().fetch():
235 | try:
236 | branch = info.ref.remote_head
237 | except ValueError:
238 | continue
239 |
240 | _branches.add(branch)
241 |
242 | if branch not in self._git.heads:
243 | self._git.create_head(branch, info.ref).set_tracking_branch(info.ref)
244 |
245 | except GitCommandError as e:
246 | self._git = None
247 | self._error_code = e.status
248 | self._stderr = (e.stderr or 'null').strip()
249 | return
250 |
251 | for head in self._git.heads:
252 | if head.name not in _branches:
253 | if head == self._git.head.ref:
254 | self._git.git.checkout(head.commit.hexsha, force=True)
255 |
256 | self._git.delete_head(head, force=True)
257 |
258 | _changed = False
259 |
260 | if self._branch_exists(self.info.branch):
261 | head = self._git.heads[self.info.branch]
262 | else:
263 | head = self._git.heads[0]
264 | self.info.branch = head.name
265 | _changed = True
266 |
267 | if self._git.head.is_detached or self._git.head.ref != head:
268 | head.checkout(force=True)
269 |
270 | self._git.head.reset(self._git.remote().refs[head.name].name, working_tree=True)
271 |
272 | version = self.info.version
273 | commit = (self._get_commit(version) if version else None) or head.commit
274 |
275 | if version != commit.hexsha:
276 | self.info.version = commit.hexsha
277 | _changed = True
278 |
279 | self.info.count = commit.count()
280 | self.info.max_count = head.commit.count()
281 |
282 | self.info.branches.clear()
283 | self.info.branches.extend(head.name for head in self._git.heads)
284 | self.info.branches.sort()
285 |
286 | if _changed:
287 | self._update()
288 |
289 | def checkout_version(self) -> None:
290 | version = self.info.version
291 |
292 | if self._git and self._git.head.commit.hexsha != version:
293 | self._git.git.checkout(version, force=True)
294 |
295 | def checkout_branch(self) -> None:
296 | branch = self.info.branch
297 |
298 | if self._git and (self._git.head.is_detached or self._git.head.ref.name != branch):
299 | self._git.git.checkout(branch, force=True)
300 |
301 | def copy(self, source: str, path: str) -> None:
302 | copytree(join(self._path, source), path)
303 |
304 | def new_commits(self) -> List[Update]:
305 | data = []
306 | head = self._get_commit()
307 |
308 | if head:
309 | top = self._git.commit(self.info.branch)
310 | diff = top.count() - head.count()
311 |
312 | if diff > 0:
313 | for commit in self._git.iter_commits(self.info.branch, max_count=diff):
314 | data.append(Update.parse(safe_url(self.info.url), commit))
315 |
316 | return data
317 |
318 | def old_commits(self, limit: int) -> List[Update]:
319 | data = []
320 |
321 | if limit > 0:
322 | head = self._get_commit()
323 |
324 | if head:
325 | top = self._git.commit(self.info.branch)
326 | skip = top.count() - head.count() + 1
327 |
328 | if skip > 0:
329 | for commit in self._git.iter_commits(self.info.branch,
330 | max_count=limit, skip=skip):
331 | data.append(Update.parse(safe_url(self.info.url), commit))
332 |
333 | return data
334 |
335 | def delete(self) -> None:
336 | rmtree(self._path)
337 |
338 | @staticmethod
339 | def gen_path(path: str, url: str) -> str:
340 | return join(path, '.'.join(url.split('/')[-2:]))
341 |
342 | def edit(self, branch: Optional[str], version: Optional[Union[int, str]],
343 | priority: Optional[int]) -> bool:
344 | _changed = False
345 |
346 | if branch and self.info.branch != branch and self._branch_exists(branch):
347 | commit = self._get_commit(branch)
348 |
349 | self.info.branch = branch
350 | self.info.version = commit.hexsha if commit else ""
351 | self.info.count = self.info.max_count = commit.count() if commit else 0
352 |
353 | _changed = True
354 |
355 | elif version:
356 | commit = self._get_commit(version)
357 |
358 | if commit and self.info.version != commit.hexsha:
359 | self.info.version = commit.hexsha
360 | self.info.count = commit.count()
361 |
362 | _changed = True
363 |
364 | if isinstance(priority, int) and self.info.priority != priority:
365 | self.info.priority = priority
366 |
367 | Repos.sort()
368 | _changed = True
369 |
370 | if _changed:
371 | self._update()
372 |
373 | return _changed
374 |
375 | def _update(self) -> None:
376 | raise NotImplementedError
377 |
378 |
379 | class _CoreRepo(_BaseRepo):
380 | PATH = join(_CACHE_PATH, "core")
381 |
382 | _url = CORE_REPO
383 | _branch = CORE_BRANCH
384 |
385 | @classmethod
386 | def parse(cls, branch: str, version: str) -> '_CoreRepo':
387 | info = RepoInfo.parse(-1, -1, branch or cls._branch, version, cls._url)
388 | path = _BaseRepo.gen_path(cls.PATH, cls._url)
389 |
390 | return cls(info, path)
391 |
392 | def grab_req(self) -> Optional[List[str]]:
393 | req = join(self._path, "requirements.txt")
394 |
395 | if isfile(req):
396 | with open(req) as f:
397 | return f.read().strip().split()
398 |
399 | def grab_loader_version(self) -> Optional[str]:
400 | loader_ = join(self._path, "min_loader.txt")
401 |
402 | if isfile(loader_):
403 | with open(loader_) as f:
404 | return f.read().strip()
405 |
406 | def get_plugins(self) -> List[str]:
407 | cat_path = join(self._path, "plugins", "builtin")
408 |
409 | if exists(cat_path):
410 | return [_ for _ in os.listdir(cat_path) if isdir(join(cat_path, _)) and not _.startswith("_")]
411 |
412 | return []
413 |
414 | def edit(self, branch: Optional[str], version: Optional[Union[int, str]], _=None) -> bool:
415 | return super().edit(branch, version, None)
416 |
417 | def reset(self) -> None:
418 | if self.info.branch == self._branch and self.info.version == "":
419 | return
420 |
421 | self.info.branch = self._branch
422 | self.info.version = ""
423 |
424 | self._update()
425 |
426 | def copy(self, source="userge", path="userge") -> None:
427 | super().copy(source, path)
428 |
429 | def _update(self) -> None:
430 | Database.get().config.update_one({'key': 'core'},
431 | {"$set": {'branch': self.info.branch,
432 | 'version': self.info.version}}, upsert=True)
433 | Sig.core_remove()
434 |
435 |
436 | class _PluginsRepo(_BaseRepo):
437 | PATH = join(_CACHE_PATH, "repos")
438 |
439 | _counter = count(1)
440 |
441 | def __init__(self, info: RepoInfo, path: str):
442 | super().__init__(info, path)
443 | self._plugins: List[_Plugin] = []
444 |
445 | @classmethod
446 | def parse(cls, priority: int, branch: str, version: str, url: str) -> '_PluginsRepo':
447 | info = RepoInfo.parse(next(cls._counter), priority, branch, version, url)
448 | path = _BaseRepo.gen_path(cls.PATH, url)
449 |
450 | return cls(info, path)
451 |
452 | def load_plugins(self) -> None:
453 | self._plugins.clear()
454 |
455 | plugins_path = join(self._path, "plugins")
456 | if not isdir(plugins_path):
457 | return
458 |
459 | for cat in os.listdir(plugins_path):
460 | cat_path = join(plugins_path, cat)
461 | if not isdir(cat_path) or cat == "builtin" or cat.startswith('_'):
462 | continue
463 |
464 | for plg in os.listdir(cat_path):
465 | plg_path = join(cat_path, plg)
466 | if not isdir(plg_path) or plg.startswith('_'):
467 | continue
468 |
469 | self._plugins.append(_Plugin.parse(plg_path, cat, plg, self.info))
470 |
471 | def iter_plugins(self) -> Iterator[_Plugin]:
472 | return iter(self._plugins)
473 |
474 | def _update(self) -> None:
475 | Database.get().repos.update_one({'url': self.info.url},
476 | {"$set": {'branch': self.info.branch,
477 | 'version': self.info.version,
478 | 'priority': self.info.priority}})
479 | Sig.repos_remove()
480 |
481 |
482 | class Repos:
483 | _core: Optional[_CoreRepo] = None
484 | _plugins: List[_PluginsRepo] = []
485 |
486 | _loaded = False
487 | _RE_REPO = re.compile(r"https://(?:ghp_[0-9A-Za-z]{36}@)?github.com/[\w-]+/[\w.-]+$")
488 |
489 | @classmethod
490 | def load(cls) -> None:
491 | if cls._loaded:
492 | return
493 |
494 | db = Database.get()
495 |
496 | data = db.config.find_one({'key': 'core'})
497 | branch = data['branch'] if data else ""
498 | version = data['version'] if data else ""
499 | cls._core = _CoreRepo.parse(branch, version)
500 |
501 | for d in db.repos.find():
502 | repo = _PluginsRepo.parse(d['priority'], d['branch'], d['version'], d['url'])
503 | cls._plugins.append(repo)
504 |
505 | cls.sort()
506 | cls._loaded = True
507 |
508 | @classmethod
509 | def sort(cls) -> None:
510 | cls._plugins.sort(key=lambda _: _.info.priority)
511 |
512 | @classmethod
513 | def get_core(cls) -> Optional[_CoreRepo]:
514 | return cls._core
515 |
516 | @classmethod
517 | def get(cls, repo_id_or_url: Union[int, str]) -> Optional[_PluginsRepo]:
518 | is_id = isinstance(repo_id_or_url, int)
519 |
520 | for repo in cls._plugins:
521 | if is_id:
522 | if repo.info.id == repo_id_or_url:
523 | return repo
524 | else:
525 | if repo.info.url == repo_id_or_url:
526 | return repo
527 |
528 | @classmethod
529 | def has_repos(cls) -> bool:
530 | return len(cls._plugins) > 0
531 |
532 | @classmethod
533 | def iter_repos(cls) -> Iterator[_PluginsRepo]:
534 | return iter(cls._plugins)
535 |
536 | @classmethod
537 | def add(cls, priority: int, branch: str, url: str) -> bool:
538 | if not cls._RE_REPO.match(url) or cls.get(url):
539 | return False
540 |
541 | version = ""
542 |
543 | cls._plugins.append(_PluginsRepo.parse(priority, branch, version, url))
544 | cls.sort()
545 | Database.get().repos.insert_one({'priority': priority, 'branch': branch,
546 | 'version': version, 'url': url})
547 | Sig.repos_remove()
548 |
549 | return True
550 |
551 | @classmethod
552 | def remove(cls, repo_id: int) -> bool:
553 | repo = cls.get(repo_id)
554 | if repo:
555 | cls._plugins.remove(repo)
556 | Database.get().repos.delete_one({'url': repo.info.url})
557 | repo.delete()
558 | Sig.repos_remove()
559 |
560 | return True
561 |
562 | return False
563 |
564 |
565 | class _ConstraintData:
566 | def __init__(self, repo_name: Optional[str], plg_cat: Optional[str],
567 | plg_name: Optional[str], raw: str):
568 | self.repo_name = repo_name
569 | self.plg_cat = plg_cat
570 | self.plg_name = plg_name
571 | self.raw = raw
572 |
573 | @classmethod
574 | def parse(cls, data: str) -> '_ConstraintData':
575 | data = data.strip().lower()
576 | parts = data.split('/')
577 | size = len(parts)
578 |
579 | repo_name = None
580 | plg_cat = None
581 | plg_name = None
582 |
583 | # possible cases
584 | #
585 | # plg_name
586 | # plg_cat/
587 | # repo_name/plg_name
588 | # repo_name/plg_cat/
589 | #
590 |
591 | if size == 3:
592 | repo_name = parts[0]
593 | plg_cat = parts[1]
594 |
595 | elif size == 2:
596 | if parts[1]:
597 | repo_name = parts[0]
598 | plg_name = parts[1]
599 | else:
600 | plg_cat = parts[0]
601 |
602 | else:
603 | plg_name = parts[0]
604 |
605 | return cls(repo_name, plg_cat, plg_name, data)
606 |
607 | def match(self, repo_name: str, plg_cat: str, plg_name: str) -> bool:
608 | if self.repo_name and self.repo_name != repo_name:
609 | return False
610 |
611 | if self.plg_cat and self.plg_cat != plg_cat:
612 | return False
613 |
614 | if self.plg_name and self.plg_name != plg_name:
615 | return False
616 |
617 | if self.repo_name or self.plg_cat or self.plg_name:
618 | return True
619 |
620 | return False
621 |
622 | def __str__(self) -> str:
623 | return self.raw
624 |
625 |
626 | class _Constraint:
627 | def __init__(self):
628 | self._data: List[_ConstraintData] = []
629 |
630 | def add(self, data: List[str]) -> List[str]:
631 | added = []
632 |
633 | for d in set(map(lambda _: _.strip().lower(), data)):
634 | if all(map(lambda _: _.raw != d, self._data)):
635 | self._data.append(_ConstraintData.parse(d.strip()))
636 | added.append(d)
637 |
638 | return added
639 |
640 | def remove(self, data: List[str]) -> List[str]:
641 | removed = []
642 |
643 | for d in set(map(lambda _: _.strip().lower(), data)):
644 | for cd in self._data:
645 | if cd.raw == d:
646 | self._data.remove(cd)
647 | removed.append(d)
648 | break
649 |
650 | return removed
651 |
652 | def clear(self) -> int:
653 | size = len(self._data)
654 | self._data.clear()
655 |
656 | return size
657 |
658 | def to_constraint(self) -> Optional[Constraint]:
659 | if self._data:
660 | return Constraint(self.get_type(), self._to_str_list())
661 |
662 | def get_type(self) -> str:
663 | raise NotImplementedError
664 |
665 | def _to_str_list(self) -> List[str]:
666 | return list(map(str, self._data))
667 |
668 | def empty(self) -> bool:
669 | return len(self._data) == 0
670 |
671 | def match(self, *args: str) -> bool:
672 | for part in self._data:
673 | if part.match(*args):
674 | return True
675 |
676 | return False
677 |
678 | def __str__(self) -> str:
679 | return self.get_type() + '(' + str(self._to_str_list()) + ')'
680 |
681 |
682 | class _Include(_Constraint):
683 | def get_type(self) -> str:
684 | return "include"
685 |
686 |
687 | class _Exclude(_Constraint):
688 | def get_type(self) -> str:
689 | return "exclude"
690 |
691 |
692 | class _In(_Constraint):
693 | def get_type(self) -> str:
694 | return "in"
695 |
696 |
697 | class _Constraints:
698 | def __init__(self, *data: _Constraint):
699 | self._data = data
700 |
701 | def get(self, c_type: str) -> Optional[_Constraint]:
702 | c_type = c_type.strip().lower()
703 |
704 | for const in self._data:
705 | if const.get_type() == c_type:
706 | return const
707 |
708 | def remove(self, data: List[str]) -> List[str]:
709 | removed = []
710 |
711 | for const in self._data:
712 | removed.extend(const.remove(data))
713 |
714 | return removed
715 |
716 | def clear(self) -> int:
717 | _count = 0
718 |
719 | for const in self._data:
720 | _count += const.clear()
721 |
722 | return _count
723 |
724 | def to_constraints(self) -> List[Constraint]:
725 | return list(filter(None, map(_Constraint.to_constraint, self._data)))
726 |
727 | def match(self, *args: str) -> Optional[_Constraint]:
728 | for const in self._data:
729 | if const.empty():
730 | continue
731 |
732 | if isinstance(const, _Include):
733 | if const.match(*args):
734 | break
735 |
736 | elif isinstance(const, _Exclude):
737 | if const.match(*args):
738 | return const
739 |
740 | elif isinstance(const, _In):
741 | if not const.match(*args):
742 | return const
743 |
744 |
745 | class Constraints:
746 | _data = _Constraints(_Include(), _Exclude(), _In())
747 | _loaded = False
748 |
749 | @classmethod
750 | def load(cls) -> None:
751 | if cls._loaded:
752 | return
753 |
754 | for d in Database.get().constraint.find():
755 | c_type = d['type']
756 | data = d['data']
757 |
758 | const = cls._data.get(c_type)
759 |
760 | if const:
761 | const.add([data])
762 |
763 | cls._loaded = True
764 |
765 | @classmethod
766 | def add(cls, c_type: str, data: List[str]) -> bool:
767 | const = cls._data.get(c_type)
768 |
769 | if not const:
770 | return False
771 |
772 | to_add = const.add(data)
773 |
774 | if to_add:
775 | Database.get().constraint.insert_many(
776 | map(lambda _: dict(type=const.get_type(), data=_), to_add))
777 |
778 | Sig.repos_remove()
779 |
780 | return True
781 |
782 | return False
783 |
784 | @classmethod
785 | def remove(cls, c_type: Optional[str], data: List[str]) -> bool:
786 | if c_type:
787 | const = cls._data.get(c_type)
788 |
789 | if not const:
790 | return False
791 |
792 | to_remove = const.remove(data)
793 | else:
794 | to_remove = cls._data.remove(data)
795 |
796 | if to_remove:
797 | _data = {'data': {'$in': to_remove}}
798 |
799 | if c_type:
800 | _data['type'] = c_type.strip().lower()
801 |
802 | Database.get().constraint.delete_many(_data)
803 | Sig.repos_remove()
804 |
805 | return True
806 |
807 | return False
808 |
809 | @classmethod
810 | def clear(cls, c_type: Optional[str]) -> bool:
811 | if c_type:
812 | const = cls._data.get(c_type)
813 |
814 | if not const:
815 | return False
816 |
817 | _count = const.clear()
818 | else:
819 | _count = cls._data.clear()
820 |
821 | if _count:
822 | Database.get().constraint.drop()
823 | Sig.repos_remove()
824 |
825 | return True
826 |
827 | return False
828 |
829 | @classmethod
830 | def get(cls) -> List[Constraint]:
831 | return cls._data.to_constraints()
832 |
833 | @classmethod
834 | def match(cls, plg: _Plugin) -> Optional[_Constraint]:
835 | return cls._data.match(plg.repo_name.lower(), plg.cat.lower(), plg.name.lower())
836 |
837 |
838 | class Sig:
839 | _core = join(_CACHE_PATH, ".sig_core")
840 | _repos = join(_CACHE_PATH, ".sig_repos")
841 |
842 | @staticmethod
843 | def _make(path: str) -> None:
844 | if not exists(path):
845 | open(path, 'w').close()
846 |
847 | @classmethod
848 | def core_exists(cls) -> bool:
849 | return exists(cls._core)
850 |
851 | @classmethod
852 | def core_make(cls) -> None:
853 | cls._make(cls._core)
854 |
855 | @classmethod
856 | def core_remove(cls) -> None:
857 | remove(cls._core)
858 |
859 | @classmethod
860 | def repos_exists(cls) -> bool:
861 | return exists(cls._repos)
862 |
863 | @classmethod
864 | def repos_make(cls) -> None:
865 | cls._make(cls._repos)
866 |
867 | @classmethod
868 | def repos_remove(cls) -> None:
869 | remove(cls._repos)
870 |
871 |
872 | class Cache:
873 | _core = _CoreRepo.PATH
874 | _repos = _PluginsRepo.PATH
875 |
876 | @classmethod
877 | def core_remove(cls) -> None:
878 | rmtree(cls._core)
879 |
880 | @classmethod
881 | def repos_remove(cls) -> None:
882 | rmtree(cls._repos)
883 |
884 |
885 | class Requirements:
886 | _data = set()
887 |
888 | @classmethod
889 | def size(cls) -> int:
890 | return len(cls._data)
891 |
892 | @classmethod
893 | def update(cls, data: Optional[Iterable[str]]) -> None:
894 | if data:
895 | cls._data.update(filter(None, map(str.strip, data)))
896 |
897 | @classmethod
898 | def install(cls) -> Tuple[int, str]:
899 | if cls._data:
900 | data = cls._data.copy()
901 | cls._data.clear()
902 |
903 | cls._install('--upgrade', 'pip')
904 | return cls._install('--no-warn-script-location', *data)
905 |
906 | return 0, ''
907 |
908 | @staticmethod
909 | def _install(*args: str) -> Tuple[int, str]:
910 | return call(sys.executable, '-m', 'pip', 'install', *args)
911 |
912 |
913 | class Tasks:
914 | _handlers: Dict[int, Callable] = {}
915 |
916 | @classmethod
917 | def add(cls, job: int, callback: Callable) -> None:
918 | cls._handlers[job] = callback
919 |
920 | @classmethod
921 | def handle(cls, job: int, *arg) -> object:
922 | try:
923 | return cls._handlers[job](*arg)
924 | except KeyError:
925 | return KeyError(f"Invalid job id: {job}")
926 | except Exception as e:
927 | return e
928 |
929 |
930 | class Session:
931 | _init = True
932 | _restart = False
933 | _process: Optional[Process] = None
934 |
935 | @classmethod
936 | def should_init(cls) -> bool:
937 | if cls._init:
938 | cls._init = False
939 | return True
940 |
941 | return False
942 |
943 | @classmethod
944 | def should_restart(cls) -> bool:
945 | if cls._restart:
946 | cls._restart = False
947 | return True
948 |
949 | return False
950 |
951 | @classmethod
952 | def set_process(cls, p: Process) -> None:
953 | cls._process = p
954 |
955 | if exists(CONF_PATH):
956 | assert_write(CONF_PATH, True)
957 |
958 | @classmethod
959 | def terminate(cls) -> None:
960 | if cls._process:
961 | try:
962 | terminate(cls._process.pid)
963 | except ValueError:
964 | raise KeyboardInterrupt
965 |
966 | @classmethod
967 | def restart(cls, should_init: bool) -> None:
968 | cls._init = should_init
969 | cls._restart = True
970 |
971 | cls.terminate()
972 |
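A short sketch of the constraint grammar handled by _ConstraintData.parse(); repo names
take the owner.repo form built by RepoInfo.parse(), and the repo/plugin names below are
purely illustrative. An "exclude" constraint drops matching plugins, an "in" constraint
drops everything that does not match, and a matching "include" short-circuits both (see
_Constraints.match()).

    from loader.core.types import _ConstraintData

    c = _ConstraintData.parse("UsergeTeam.Userge-Plugins/fun/")

    assert c.repo_name == "usergeteam.userge-plugins"   # parse() lowercases its input
    assert c.plg_cat == "fun"
    assert c.plg_name is None

    # matches every plugin in the fun category of that repo ...
    assert c.match("usergeteam.userge-plugins", "fun", "memes")
    # ... but nothing coming from another repo
    assert not c.match("someone.plugins", "fun", "memes")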
--------------------------------------------------------------------------------
/loader/core/utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ['log', 'error', 'terminate', 'call', 'open_url', 'get_client_type',
2 | 'safe_url', 'safe_repo_info', 'grab_conflicts', 'assert_read', 'assert_write',
3 | 'assert_read_write', 'remove', 'rmtree', 'clean_core', 'clean_plugins', 'print_logo']
4 |
5 | import logging
6 | import os
7 | import re
8 | import stat
9 | import subprocess
10 | from copy import copy
11 | from functools import lru_cache
12 | from itertools import combinations
13 | from logging.handlers import RotatingFileHandler
14 | from os.path import join
15 | from shutil import rmtree as _rmtree
16 | from typing import Optional, Tuple, Set, Dict, Any
17 | from urllib.error import HTTPError
18 | from urllib.request import urlopen, Request
19 | try:
20 | from signal import CTRL_C_EVENT as SIGTERM
21 | except ImportError:
22 | from signal import SIGTERM
23 |
24 | from loader.types import RepoInfo
25 |
26 | if not os.path.exists('logs'):
27 | os.mkdir('logs')
28 |
29 | logging.basicConfig(level=logging.INFO,
30 | format='[%(asctime)s - %(levelname)s] - %(name)s - %(message)s',
31 | datefmt='%d-%b-%y %H:%M:%S',
32 | handlers=[
33 | RotatingFileHandler(
34 | "logs/loader.log", maxBytes=81920, backupCount=10),
35 | logging.StreamHandler()
36 | ])
37 |
38 | _LOG = logging.getLogger("loader")
39 |
40 |
41 | def log(msg: str) -> None:
42 | _LOG.info(msg)
43 |
44 |
45 | def error(msg: str, hint: Optional[str] = None, interrupt=True) -> None:
46 | _LOG.error(msg + "\n\tHINT: " + hint if hint else msg)
47 | if interrupt:
48 | raise KeyboardInterrupt
49 |
50 |
51 | def terminate(pid: int) -> None:
52 | os.kill(pid, SIGTERM)
53 |
54 |
55 | def call(*args: str) -> Tuple[int, str]:
56 | p = subprocess.Popen(args, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, text=True)
57 | return p.wait(), p.communicate()[1]
58 |
59 |
60 | def open_url(url: str, headers: Optional[dict] = None) -> Tuple[Any, Optional[str]]:
61 | r = Request(url, headers=headers or {})
62 | try:
63 | return urlopen(r, timeout=3), None
64 | except HTTPError as e:
65 | return e.code, e.reason
66 |
67 |
68 | def get_client_type() -> str:
69 | token = os.environ.get('BOT_TOKEN')
70 | string = os.environ.get('SESSION_STRING')
71 |
72 | if token and string:
73 | return "dual"
74 | if token:
75 | return "bot"
76 | if string:
77 | return "user"
78 |
79 |
80 | _TOKEN = re.compile(r"ghp_[0-9A-Za-z]{36}")
81 | _REQUIREMENTS = re.compile(r'(\S+)(<=|<|==|>=|>|!=|~=)(\S+)')
82 |
83 |
84 | @lru_cache
85 | def safe_url(url: str) -> str:
86 | return _TOKEN.sub('private', url)
87 |
88 |
89 | def safe_repo_info(repo_info: RepoInfo) -> RepoInfo:
90 | info = copy(repo_info)
91 | info.url = safe_url(info.url)
92 |
93 | return info
94 |
95 |
96 | def grab_conflicts(requirements: Set[str]) -> Set[str]:
97 | to_audit: Dict[str, Dict[str, Set[str]]] = {}
98 |
99 | for req in filter(lambda _: any(map(_.__contains__, ('=', '>', '<'))), requirements):
100 | match = _REQUIREMENTS.match(req)
101 |
102 | name = match.group(1)
103 | cond = match.group(2)
104 | version = match.group(3)
105 |
106 | if name not in to_audit:
107 | to_audit[name] = {}
108 |
109 | versions = to_audit[name]
110 |
111 | if version not in versions:
112 | versions[version] = set()
113 |
114 | version = versions[version]
115 |
116 | # treat a compatible-release spec (~=) as a plain lower bound for conflict checking
117 | if cond == '~=':
118 | cond = '>='
119 |
120 | version.add(cond)
121 |
122 | gt, ge, eq, le, lt, neq = '>', '>=', '==', '<=', '<', '!='
123 |
124 | sequence = (
125 | (gt, ge, neq),
126 | (ge, eq, le),
127 | (le, lt, neq)
128 | )
129 |
130 | pattern = []
131 |
132 | for i in range(len(sequence)):
133 | seq = sequence[i]
134 |
135 | pattern.append(seq)
136 | pattern.extend(combinations(seq, 2))
137 |
138 | for j in range(i + 1):
139 | pattern.append((seq[j],))
140 |
141 | pattern = tuple(pattern)
142 |
143 | for name, versions in to_audit.items():
144 | found = False
145 |
146 | for version in sorted(versions, reverse=True):
147 | args = versions[version]
148 |
149 | if found:
150 | # find and remove compatible reqs
151 | for _ in sequence[0]:
152 | if _ in args:
153 | args.remove(_)
154 | else:
155 | for check in pattern:
156 | if all(map(args.__contains__, check)):
157 | # found the breakpoint
158 | for _ in check:
159 | args.remove(_)
160 |
161 | if not any(map(check.__contains__, sequence[2])):
162 | found = True
163 |
164 | break
165 |
166 | conflicts = set()
167 |
168 | for name, versions in to_audit.items():
169 | for version, args in versions.items():
170 | for arg in args:
171 | conflicts.add(name + arg + version)
172 |
173 | return conflicts
174 |
175 |
176 | def _perm(path: str, check: Optional[int], perm: int) -> bool:
177 | return bool(check and os.access(path, check) or os.chmod(path, perm))
178 |
179 |
180 | def assert_read(path: str) -> bool:
181 | return _perm(path, os.R_OK, stat.S_IREAD)
182 |
183 |
184 | def assert_write(path: str, force=False) -> bool:
185 | return _perm(path, None if force else os.W_OK, stat.S_IWRITE)
186 |
187 |
188 | def assert_read_write(path: str) -> bool:
189 | return _perm(path, os.R_OK | os.W_OK, stat.S_IREAD | stat.S_IWRITE)
190 |
191 |
192 | def _on_error(func, path, _) -> None:
193 | if os.path.exists(path) and not assert_write(path):
194 | func(path)
195 |
196 |
197 | def remove(path: str) -> None:
198 | if os.path.exists(path):
199 | assert_write(path)
200 | os.remove(path)
201 |
202 |
203 | def rmtree(path: str) -> None:
204 | if os.path.isdir(path):
205 | _rmtree(path, onerror=_on_error)
206 |
207 |
208 | def clean_core() -> None:
209 | rmtree("userge")
210 |
211 |
212 | def clean_plugins() -> None:
213 | plugins_path = join("userge", "plugins")
214 |
215 | for cat in os.listdir(plugins_path):
216 | if cat == "builtin":
217 | continue
218 |
219 | rmtree(join(plugins_path, cat))
220 |
221 |
222 | def _print_line():
223 | log('->- ->- ->- ->- ->- ->- ->- --- -<- -<- -<- -<- -<- -<- -<-')
224 |
225 |
226 | def print_logo():
227 | _print_line()
228 |
229 | logo = r'''
230 | ________ __ __ ______
231 | /_ __/ /_ ___ / / / /_______ _____/ ____/__
232 | / / / __ \/ _ \ / / / / ___/ _ \/ ___/ / __/ _ \
233 | / / / / / / __/ / /_/ (__ ) __/ / / /_/ / __/
234 | /_/ /_/ /_/\___/ \____/____/\___/_/ \____/\___/
235 | '''
236 | log(logo)
237 |
238 | _print_line()
239 |
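grab_conflicts() is the densest helper here, so a worked example of what it reports for two
incompatible pins of the same package (package and versions are illustrative; run it inside
the project environment since this module pulls in loader.types and its dependencies):

    from loader.core.utils import grab_conflicts

    # the lower bound tied to the newest pinned version survives, while the clashing
    # upper bound from the older pin is reported back as the conflict
    assert grab_conflicts({"lxml>=4.6", "lxml<4.0"}) == {"lxml<4.0"}

    # specs without a comparator are never audited, so they cannot conflict
    assert grab_conflicts({"lxml"}) == set()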
--------------------------------------------------------------------------------
/loader/job.py:
--------------------------------------------------------------------------------
1 | SOFT_RESTART = 1
2 | HARD_RESTART = 2
3 |
4 | FETCH_CORE = 3
5 | FETCH_REPO = 4
6 | FETCH_REPOS = 5
7 |
8 | GET_CORE = 6
9 | GET_REPO = 7
10 | GET_REPOS = 8
11 |
12 | ADD_REPO = 9
13 | REMOVE_REPO = 10
14 |
15 | GET_CORE_NEW_COMMITS = 11
16 | GET_CORE_OLD_COMMITS = 12
17 |
18 | GET_REPO_NEW_COMMITS = 13
19 | GET_REPO_OLD_COMMITS = 14
20 |
21 | EDIT_CORE = 15
22 | EDIT_REPO = 16
23 |
24 | ADD_CONSTRAINTS = 17
25 | REMOVE_CONSTRAINTS = 18
26 | GET_CONSTRAINTS = 19
27 | CLEAR_CONSTRAINTS = 20
28 |
29 | INVALIDATE_REPOS_CACHE = 21
30 |
31 | SET_ENV = 22
32 | UNSET_ENV = 23
33 |
--------------------------------------------------------------------------------
/loader/types.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from git import Commit
4 |
5 |
6 | class RepoInfo:
7 | def __init__(self, id_: int, name: str, priority: int, branch: str, version: str, url: str):
8 | self.id = id_
9 | self.name = name
10 | self.priority = priority
11 | self.branch = branch
12 | self.version = version
13 | self.url = url
14 | self.count = 0
15 | self.max_count = 0
16 | self.branches = []
17 |
18 | @classmethod
19 | def parse(cls, id_: int, priority: int, branch: str,
20 | version: str, url: str) -> 'RepoInfo':
21 | name = '.'.join(url.split('/')[-2:])
22 |
23 | return cls(id_, name, priority, branch, version, url)
24 |
25 | @property
26 | def head_url(self) -> str:
27 | return self.url.rstrip('/') + "/commit/" + self.version
28 |
29 | def __repr__(self) -> str:
30 | return (f"<RepoInfo id={self.id} name={self.name} priority={self.priority} "
31 | f"branch={self.branch} version={self.version} count={self.count} url={self.url}>")
32 |
33 |
34 | class Update:
35 | def __init__(self, summary: str, author: str, version: str,
36 | count: int, url: str):
37 | self.summary = summary
38 | self.author = author
39 | self.version = version
40 | self.count = count
41 | self.url = url
42 |
43 | @classmethod
44 | def parse(cls, repo_url: str, commit: Commit) -> 'Update':
45 | summary = str(commit.summary)
46 |
47 | author = commit.author.name
48 | if author == "None":
49 | author = commit.committer.name
50 |
51 | version = commit.hexsha
52 | count = commit.count()
53 | url = repo_url.rstrip('/') + "/commit/" + version
54 |
55 | return cls(summary, author, version, count, url)
56 |
57 | def __repr__(self) -> str:
58 | return (f"<Update author={self.author} version={self.version} "
59 | f"count={self.count} summary={self.summary} url={self.url}>")
60 |
61 |
62 | class Constraint:
63 | def __init__(self, type_: str, data: List[str]):
64 | self.type = type_
65 | self.data = data
66 |
67 | def __repr__(self) -> str:
68 | return f"<Constraint type={self.type} data={self.data}>"
69 |
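A quick sketch of how RepoInfo derives its name and head_url from a repository URL
(the id, priority, and version values are illustrative):

    from loader.types import RepoInfo

    info = RepoInfo.parse(1, 5, "master", "abc123", "https://github.com/UsergeTeam/Loader")

    assert info.name == "UsergeTeam.Loader"
    assert info.head_url == "https://github.com/UsergeTeam/Loader/commit/abc123"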
--------------------------------------------------------------------------------
/loader/userge/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UsergeTeam/Loader/2e659b77ca694d5039b9d1912663c17e67c68ab3/loader/userge/__init__.py
--------------------------------------------------------------------------------
/loader/userge/api.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | 'restart',
3 | 'fetch_core',
4 | 'fetch_repo',
5 | 'fetch_repos',
6 | 'get_core',
7 | 'get_repo',
8 | 'get_repos',
9 | 'add_repo',
10 | 'remove_repo',
11 | 'get_core_new_commits',
12 | 'get_core_old_commits',
13 | 'get_repo_new_commits',
14 | 'get_repo_old_commits',
15 | 'edit_core',
16 | 'set_core_branch',
17 | 'set_core_version',
18 | 'edit_repo',
19 | 'set_repo_branch',
20 | 'set_repo_version',
21 | 'set_repo_priority',
22 | 'add_constraints',
23 | 'remove_constraints',
24 | 'get_constraints',
25 | 'clear_constraints',
26 | 'invalidate_repos_cache',
27 | 'set_env',
28 | 'unset_env']
29 |
30 | from typing import List, Optional, Union
31 |
32 | from loader.job import *
33 | from loader.types import RepoInfo, Update, Constraint
34 | from .connection import send_and_wait, send_and_async_wait
35 |
36 |
37 | def restart(hard: bool = False) -> None:
38 | """
39 | terminate the current process and start a new one.
40 | Args:
41 | hard: if False, the initialization steps will be skipped (soft restart). Defaults to False.
42 | """
43 | send_and_wait(HARD_RESTART if hard else SOFT_RESTART)
44 |
45 |
46 | async def fetch_core() -> None:
47 | """
48 | fetch data from the default remote and update the local core repository.
49 | """
50 | return await send_and_async_wait(FETCH_CORE)
51 |
52 |
53 | async def fetch_repo(repo_id: int) -> None:
54 | """
55 | fetch data from the default remote and update this local plugins repository.
56 | Args:
57 | repo_id: id from the RepoInfo object. hint: get_repos()
58 | """
59 | return await send_and_async_wait(FETCH_REPO, repo_id)
60 |
61 |
62 | async def fetch_repos() -> None:
63 | """
64 | fetch data from the default remote and update all the local plugins repositories.
65 | """
66 | return await send_and_async_wait(FETCH_REPOS)
67 |
68 |
69 | async def get_core() -> Optional[RepoInfo]:
70 | """
71 | get the core repo details.
72 | Returns:
73 | details as a RepoInfo object.
74 | """
75 | return await send_and_async_wait(GET_CORE)
76 |
77 |
78 | async def get_repo(repo_id: int) -> Optional[RepoInfo]:
79 | """
80 | get details of a plugins repo by its id.
81 | Args:
82 | repo_id: id from the RepoInfo object. hint: get_repos()
83 |
84 | Returns:
85 | a RepoInfo object.
86 | """
87 | return await send_and_async_wait(GET_REPO, repo_id)
88 |
89 |
90 | async def get_repos() -> List[RepoInfo]:
91 | """
92 | get details of all the plugins repos.
93 | Returns:
94 | list of RepoInfo objects.
95 | """
96 | return await send_and_async_wait(GET_REPOS)
97 |
98 |
99 | async def add_repo(priority: int, branch: str, url: str) -> bool:
100 | """
101 | add a plugins repo.
102 | Args:
103 | priority: priority of this repo. the loader sorts the repos list by this number, and
104 | plugins sharing a name are overridden according to that order. so if you don't want
105 | this repo's plugins to be overridden, give it a higher priority.
106 | branch: branch name of this repo.
107 | url: link to the repo
108 |
109 | Returns:
110 | True on success, else False.
111 | """
112 | return await send_and_async_wait(ADD_REPO, priority, branch, url)
113 |
114 |
115 | async def remove_repo(repo_id: int) -> bool:
116 | """
117 | remove a plugins repo by its id.
118 | Args:
119 | repo_id: id from the RepoInfo object. hint: get_repos()
120 |
121 | Returns:
122 | True on success, else False.
123 | """
124 | return await send_and_async_wait(REMOVE_REPO, repo_id)
125 |
126 |
127 | async def get_core_new_commits() -> Optional[List[Update]]:
128 | """
129 | get new commits on the current branch of the core repo compared to the current version.
130 | Returns:
131 | list of Update objects.
132 | """
133 | return await send_and_async_wait(GET_CORE_NEW_COMMITS)
134 |
135 |
136 | async def get_core_old_commits(limit: int) -> Optional[List[Update]]:
137 | """
138 | get old commits from the current branch of the core repo compared to the current version.
139 | Args:
140 | limit: specify how many commits you want.
141 |
142 | Returns:
143 | list of Update objects.
144 | """
145 | return await send_and_async_wait(GET_CORE_OLD_COMMITS, limit)
146 |
147 |
148 | async def get_repo_new_commits(repo_id: int) -> Optional[List[Update]]:
149 | """
150 | get new commits on the current branch of a plugins repo compared to the current version.
151 | Args:
152 | repo_id: id from the RepoInfo object. hint: get_repos()
153 |
154 | Returns:
155 | list of Update objects.
156 | """
157 | return await send_and_async_wait(GET_REPO_NEW_COMMITS, repo_id)
158 |
159 |
160 | async def get_repo_old_commits(repo_id: int, limit: int) -> Optional[List[Update]]:
161 | """
162 | get old commits from the current branch of a plugins repo compared to the current version.
163 | Args:
164 | repo_id: id from the RepoInfo object. hint: get_repos()
165 | limit: specify how many commits you want.
166 |
167 | Returns:
168 | list of Update objects.
169 | """
170 | return await send_and_async_wait(GET_REPO_OLD_COMMITS, repo_id, limit)
171 |
172 |
173 | async def edit_core(branch: Optional[str], version: Optional[Union[int, str]]) -> bool:
174 | """
175 | edit the core repo branch and version.
176 | Args:
177 | branch: branch name. hint: get_core() to see available branches.
178 | if None, stays the same.
179 | version: version as hash or id. hint: get_core_new_commits() or get_core_old_commits().
180 | if None, stays the same.
181 |
182 | Returns:
183 | True if changed.
184 | """
185 | return await send_and_async_wait(EDIT_CORE, branch, version)
186 |
187 |
188 | async def set_core_branch(branch: str) -> bool:
189 | """
190 | change the core repo branch.
191 | Args:
192 | branch: branch name. hint: get_core() to see available branches.
193 |
194 | Returns:
195 | True if changed.
196 | """
197 | return await edit_core(branch, None)
198 |
199 |
200 | async def set_core_version(version: Union[int, str]) -> bool:
201 | """
202 | change the core repo version.
203 | Args:
204 | version: version as hash or id. hint: get_core_new_commits() or get_core_old_commits().
205 |
206 | Returns:
207 | True if changed.
208 | """
209 | return await edit_core(None, version)
210 |
211 |
212 | async def edit_repo(repo_id: int, branch: Optional[str],
213 | version: Optional[Union[int, str]], priority: Optional[int]) -> bool:
214 | """
215 | edit the plugins repo branch, version and priority.
216 | Args:
217 | repo_id: id from the RepoInfo object of the repo to edit.
218 | hint: get_repos().
219 | branch: branch name. hint: get_repos() to see available branches.
220 | if None, stays the same.
221 | version: version as hash or id. hint: get_repo_new_commits() or get_repo_old_commits().
222 | if None, stays the same.
223 | priority: priority of this repo. hint: see docs of add_repo().
224 | if None, stays the same.
225 |
226 | Returns:
227 | True if changed.
228 | """
229 | return await send_and_async_wait(EDIT_REPO, repo_id, branch, version, priority)
230 |
231 |
232 | async def set_repo_branch(repo_id: int, branch: str) -> bool:
233 | """
234 | change the plugins repo branch.
235 | Args:
236 | repo_id: id from the RepoInfo object. hint: get_repos()
237 | branch: branch name. hint: get_repos() to see available branches.
238 |
239 | Returns:
240 | True if changed.
241 | """
242 | return await edit_repo(repo_id, branch, None, None)
243 |
244 |
245 | async def set_repo_version(repo_id: int, version: Union[int, str]) -> bool:
246 | """
247 | change the plugins repo version.
248 | Args:
249 | repo_id: id from the RepoInfo object. hint: get_repos()
250 | version: version as hash or id. hint: get_repo_new_commits() or get_repo_old_commits().
251 |
252 | Returns:
253 | True if changed.
254 | """
255 | return await edit_repo(repo_id, None, version, None)
256 |
257 |
258 | async def set_repo_priority(repo_id: int, priority: int) -> bool:
259 | """
260 | change the plugins repo priority.
261 | Args:
262 | repo_id: id from the RepoInfo object. hint: get_repos()
263 | priority: priority of this repo. hint: see docs of add_repo().
264 |
265 | Returns:
266 | True if changed.
267 | """
268 | return await edit_repo(repo_id, None, None, priority)
269 |
270 |
271 | async def add_constraints(c_type: str, data: List[str]) -> bool:
272 | """
273 | add constraints to filter plugins or categories.
274 | Args:
275 | c_type: constraint type, one of `include`, `exclude` or `in`. the filter runs in that
276 | order: the loader first checks the `include` constraints; if a plugin or category
277 | matches one, filtering stops there and it is added to the project.
278 | if there is no `include` match, the loader then checks the `exclude` constraints,
279 | and any plugin or category that matches one is ignored.
280 | if there is no `exclude` match either, the loader checks the `in` constraints; when
281 | present, only the plugins and categories listed in them are added and everything
282 | else is ignored.
283 | data: list of constraints. a constraint can be
284 | a plugin name (ping),
285 | a category name (admin/),
286 | a repo name followed by a plugin name (usergeteam.userge-plugins/ping) and
287 | a repo name followed by a category name (usergeteam.userge-plugins/admin/).
288 |
289 | Returns:
290 | True on success, else False.
291 | """
292 | return await send_and_async_wait(ADD_CONSTRAINTS, c_type, data)
293 |
294 |
295 | async def remove_constraints(c_type: Optional[str], data: List[str]) -> bool:
296 | """
297 | remove added constraints.
298 | Args:
299 | c_type: constraint type. hint: see docs of add_constraints().
300 | if None, all the given constraint data will be removed regardless of
301 | the constraint type. if given, removal is limited to that constraint type.
302 | data: list of constraints. hint: see docs of add_constraints().
303 |
304 | Returns:
305 | True on success, else False.
306 | """
307 | return await send_and_async_wait(REMOVE_CONSTRAINTS, c_type, data)
308 |
309 |
310 | async def get_constraints() -> List[Constraint]:
311 | """
312 | get all added constraints.
313 | Returns:
314 | list of Constraint objects.
315 | """
316 | return await send_and_async_wait(GET_CONSTRAINTS)
317 |
318 |
319 | async def clear_constraints(c_type: Optional[str]) -> bool:
320 | """
321 | clear all added constraints.
322 | Args:
323 | c_type: constraint type. hint: see docs of add_constraints(). if None, this will
324 | clear all constraints. otherwise, only the constraints related to this type
325 | will be cleared.
326 |
327 | Returns:
328 | True on success, else False.
329 | """
330 | return await send_and_async_wait(CLEAR_CONSTRAINTS, c_type)
331 |
332 |
333 | async def invalidate_repos_cache() -> None:
334 | """
335 | notify the loader that the plugins should re-initialize.
336 | """
337 | return await send_and_async_wait(INVALIDATE_REPOS_CACHE)
338 |
339 |
340 | async def set_env(key: str, value: str) -> None:
341 | """
342 | set an environment variable.
343 | Args:
344 | key: key of the var
345 | value: value of the var
346 | """
347 | return await send_and_async_wait(SET_ENV, key, value)
348 |
349 |
350 | async def unset_env(key: str) -> None:
351 | """
352 | remove an environment variable.
353 | Args:
354 | key: key of the var
355 | """
356 | return await send_and_async_wait(UNSET_ENV, key)
357 |
--------------------------------------------------------------------------------
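A hedged sketch of how a plugin running inside the userge process might use this API. It assumes the loader has already injected the pipe connection (see connection.py below), so it will not run standalone; the repo URL and constraint values are illustrative and the coroutine name is made up for this example:

    from loader.userge import api

    async def sync_plugins() -> None:
        # list the configured plugins repos
        for repo in await api.get_repos():
            print(repo.id, repo.name, repo.branch, repo.version)

        # add a plugins repo; a higher priority keeps its plugins from being overridden
        await api.add_repo(priority=5, branch="master",
                           url="https://github.com/UsergeTeam/Userge-Plugins")

        # constraints are checked in the order include -> exclude -> in:
        # `include` wins outright, `exclude` drops matches, `in` keeps only its entries
        await api.add_constraints("exclude", ["admin/", "usergeteam.userge-plugins/ping"])

        # show pending core updates, then pull them and do a soft restart
        for up in await api.get_core_new_commits() or []:
            print(f"[{up.count}] {up.summary} by {up.author} -> {up.url}")
        await api.fetch_core()
        api.restart(hard=False)
--------------------------------------------------------------------------------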
/loader/userge/connection.py:
--------------------------------------------------------------------------------
1 | __all__ = ['send_and_wait', 'send_and_async_wait']
2 |
3 | import asyncio
4 | import atexit
5 | from threading import Lock
6 |
7 |
8 | _LOCK = Lock()
9 | _A_LOCK = asyncio.Lock()
10 |
11 |
12 | def send_and_wait(*_):
13 | with _LOCK:
14 | _send(*_)
15 | return _recv()
16 |
17 |
18 | async def send_and_async_wait(*_):
19 | async with _A_LOCK:
20 | with _LOCK:
21 | _send(*_)
22 | while not _poll():
23 | await asyncio.sleep(0.5)
24 | return _recv()
25 |
26 |
27 | def _send(*_) -> None:
28 | if _poll():
29 | raise Exception("connection is being used!")
30 | _Conn.send(_)
31 |
32 |
33 | def _recv():
34 | result = _Conn.recv()
35 | if isinstance(result, Exception):
36 | raise result
37 | return result
38 |
39 |
40 | def _poll() -> bool:
41 | return _Conn.poll()
42 |
43 |
44 | def _set(conn) -> None:
45 | _Conn.set(conn)
46 |
47 |
48 | class _Conn:
49 | _instance = None
50 |
51 | @classmethod
52 | def set(cls, conn) -> None:
53 | if cls._instance:
54 | cls._instance.close()
55 | cls._instance = conn
56 |
57 | @classmethod
58 | def _get(cls):
59 | if not cls._instance:
60 | raise Exception("connection not found!")
61 | if cls._instance.closed:
62 | raise Exception("connection has been closed!")
63 | return cls._instance
64 |
65 | @classmethod
66 | def send(cls, _) -> None:
67 | cls._get().send(_)
68 |
69 | @classmethod
70 | def recv(cls):
71 | return cls._get().recv()
72 |
73 | @classmethod
74 | def poll(cls) -> bool:
75 | return cls._get().poll()
76 |
77 | @classmethod
78 | def close(cls) -> None:
79 | if cls._instance:
80 | cls._instance.close()
81 | cls._instance = None
82 |
83 |
84 | atexit.register(_Conn.close)
85 |
--------------------------------------------------------------------------------
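A self-contained sketch of the request/reply protocol this module expects over a multiprocessing Pipe. In the real project the child end is injected by loader/userge/main.py and the parent end is served by loader/core, so the "PING"/"PONG" job and handler below are stand-ins, not the loader's actual code:

    from multiprocessing import Pipe, Process


    def child(conn) -> None:
        # stands in for loader.userge.main.run(), which calls _Conn.set(conn) and
        # then lets api.py send (job, *args) tuples through send_and_wait()
        conn.send(("PING",))              # real jobs are the int codes in loader/job.py
        print("child got:", conn.recv())  # an Exception reply would be re-raised by _recv()


    if __name__ == "__main__":
        parent_conn, child_conn = Pipe()
        p = Process(target=child, args=(child_conn,))
        p.start()

        job, *args = parent_conn.recv()   # the tuple sent by _send()
        parent_conn.send("PONG" if job == "PING" else None)
        p.join()
--------------------------------------------------------------------------------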
/loader/userge/main.py:
--------------------------------------------------------------------------------
1 | from importlib import import_module
2 | from os.path import abspath
3 | from sys import argv
4 |
5 |
6 | def run(conn) -> None:
7 | argv[0] = abspath("userge")
8 | getattr(import_module("loader.userge.connection"), '_set')(conn)
9 | getattr(getattr(import_module("userge.main"), 'userge'), 'begin')()
10 |
--------------------------------------------------------------------------------
/menu:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | exec python3 -m loader menu
4 |
--------------------------------------------------------------------------------
/menu.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 |
3 | start python -m loader menu
4 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | dnspython
2 | gitpython
3 | motor
4 | pyrogram>=2.0.30
5 | python-dotenv
6 |
--------------------------------------------------------------------------------
/run:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | exec python3 -m loader "$@"
4 |
--------------------------------------------------------------------------------
/run.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 |
3 | start python -m loader
4 |
--------------------------------------------------------------------------------
/tools/genstr.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=invalid-name, missing-module-docstring
2 | #
3 | # Copyright (C) 2020-2022 by UsergeTeam@Github, < https://github.com/UsergeTeam >.
4 | #
5 | # This file is part of < https://github.com/UsergeTeam/Userge > project,
6 | # and is released under the "GNU v3.0 License Agreement".
7 | # Please see < https://github.com/UsergeTeam/Userge/blob/master/LICENSE >
8 | #
9 | # All rights reserved.
10 |
11 | import asyncio
12 | import os
13 |
14 | from dotenv import load_dotenv
15 | from pyrogram import Client
16 | from pyrogram.errors import UserIsBot
17 |
18 | if os.path.isfile("config.env"):
19 | load_dotenv("config.env")
20 |
21 |
22 | async def string() -> None: # pylint: disable=missing-function-docstring
23 | async with Client(
24 | "Userge",
25 | api_id=int(os.environ.get("API_ID") or input("Enter Telegram APP ID: ")),
26 | api_hash=os.environ.get("API_HASH") or input("Enter Telegram API HASH: "),
27 | in_memory=True,
28 | ) as userge:
29 | print("\nprocessing...")
30 | out = "sent to saved messages!"
31 | try:
32 | await userge.send_message(
33 | "me", f"#USERGE #SESSION_STRING\n\n`{await userge.export_session_string()}`"
34 | )
35 | except UserIsBot:
36 | out = "successfully printed!"
37 | print(await userge.export_session_string())
38 | print(f"Done !, session string has been {out}")
39 |
40 |
41 | if __name__ == "__main__":
42 | asyncio.get_event_loop().run_until_complete(string())
43 |
--------------------------------------------------------------------------------
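A small illustrative wrapper around the script above: pre-setting API_ID and API_HASH (placeholder values here, use your own) skips the two input() prompts, though Pyrogram will still ask for the phone number and login code interactively:

    import os
    import subprocess

    os.environ.setdefault("API_ID", "12345")               # placeholder
    os.environ.setdefault("API_HASH", "0123456789abcdef")  # placeholder
    subprocess.run(["python3", "tools/genstr.py"], check=True)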