getLogger instead of calling logging.* methods
parent 1e4fdd2e89
commit 134a27b43d
5 changed files with 44 additions and 37 deletions
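
For context, the change replaces calls on the root logging module with one named logger shared across the package. A minimal sketch of the pattern (the path argument below is illustrative, not taken from the diff):

import logging

# python_aternos/atlog.py defines a single named logger for the package
log = logging.getLogger('aternos')

# Call sites then do `from .atlog import log` and log through it:
log.debug('Restoring session from %s', '~/.aternos')  # was: logging.debug(...)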
python_aternos/atclient.py
@@ -4,7 +4,6 @@ and allows to manage your account"""
 import os
 import re
 import hashlib
-import logging
 import base64
 
@@ -13,6 +12,8 @@ from typing import Optional, Type
 
 import lxml.html
 
+from .atlog import log
+
 from .atconnect import AternosConnect
 from .atconnect import BASE_URL, AJAX_URL
 
@@ -214,7 +215,7 @@ class Client:
         """
 
         file = os.path.expanduser(file)
-        logging.debug('Restoring session from %s', file)
+        log.debug('Restoring session from %s', file)
 
         if not os.path.exists(file):
             raise FileNotFoundError()
@@ -302,7 +303,7 @@ class Client:
         """
 
         file = os.path.expanduser(file)
-        logging.debug('Saving session to %s', file)
+        log.debug('Saving session to %s', file)
 
         with open(file, 'wt', encoding='utf-8') as f:
@@ -323,12 +324,12 @@ class Client:
         """
 
         file = os.path.expanduser(file)
-        logging.debug('Removing session file: %s', file)
+        log.debug('Removing session file: %s', file)
 
         try:
             os.remove(file)
         except OSError as err:
-            logging.warning('Unable to delete session file: %s', err)
+            log.warning('Unable to delete session file: %s', err)
 
    def list_servers(self, cache: bool = True) -> List[AternosServer]:
        """Parses a list of your servers from Aternos website
@@ -358,7 +359,7 @@ class Client:
        try:
            self.save_session(self.saved_session)
        except OSError as err:
-            logging.warning('Unable to save servers list to file: %s', err)
+            log.warning('Unable to save servers list to file: %s', err)

        return self.servers

@@ -377,7 +378,7 @@ class Client:
            if servid == '':
                continue

-            logging.debug('Adding server %s', servid)
+            log.debug('Adding server %s', servid)
            srv = AternosServer(servid, self.atconn)
            self.servers.append(srv)

python_aternos/atconnect.py
@@ -6,8 +6,6 @@ import time
 import string
 import secrets
 
-import logging
-
 from functools import partial
 
 from typing import Optional
@@ -17,6 +15,8 @@ import requests
 
 from cloudscraper import CloudScraper
 
+from .atlog import log
+
 from . import atjsparse
 from .aterrors import TokenError
 from .aterrors import CloudflareError
@@ -58,10 +58,10 @@ class AternosConnect:
        """

        if len(kwargs) < 1:
-            logging.debug('**kwargs is empty')
+            log.debug('**kwargs is empty')
            return

-        logging.debug('New args for CloudScraper: %s', kwargs)
+        log.debug('New args for CloudScraper: %s', kwargs)
        self.cf_init = partial(CloudScraper, **kwargs)
        self.refresh_session()

@@ -69,7 +69,7 @@ class AternosConnect:
        """Clear CloudScarper object __init__ arguments
        which was set using add_args method"""

-        logging.debug('Creating session object with no keywords')
+        log.debug('Creating session object with no keywords')
        self.cf_init = partial(CloudScraper)
        self.refresh_session()

@@ -110,7 +110,7 @@ class AternosConnect:
        # Some checks
        if headtag < 0 or headend < 0:
            pagehead = loginpage
-            logging.warning(
+            log.warning(
                'Unable to find <head> tag, parsing the whole page'
            )

@@ -134,18 +134,18 @@ class AternosConnect:

        except (IndexError, TypeError) as err:

-            logging.warning('---')
-            logging.warning('Unable to parse AJAX_TOKEN!')
-            logging.warning('Please, insert the info below')
-            logging.warning('to the GitHub issue description:')
-            logging.warning('---')
+            log.warning('---')
+            log.warning('Unable to parse AJAX_TOKEN!')
+            log.warning('Please, insert the info below')
+            log.warning('to the GitHub issue description:')
+            log.warning('---')

-            logging.warning('JavaScript: %s', js_code)
-            logging.warning(
+            log.warning('JavaScript: %s', js_code)
+            log.warning(
                'All script tags: %s',
                re.findall(SCRIPT_TAG_REGEX, pagehead)
            )
-            logging.warning('---')
+            log.warning('---')

            raise TokenError(
                'Unable to parse TOKEN from the page'
@@ -247,12 +247,12 @@ class AternosConnect:
            for k, v in self.session.cookies.items()
        }

-        logging.debug('Requesting(%s)%s', method, url)
-        logging.debug('headers=%s', headers)
-        logging.debug('params=%s', params)
-        logging.debug('data=%s', data)
-        logging.debug('req-cookies=%s', reqcookies_dbg)
-        logging.debug('session-cookies=%s', session_cookies_dbg)
+        log.debug('Requesting(%s)%s', method, url)
+        log.debug('headers=%s', headers)
+        log.debug('params=%s', params)
+        log.debug('data=%s', data)
+        log.debug('req-cookies=%s', reqcookies_dbg)
+        log.debug('session-cookies=%s', session_cookies_dbg)

        if method == 'POST':
            sendreq = partial(
@@ -277,7 +277,7 @@ class AternosConnect:
        cloudflare = req.status_code == 403

        if html_type and cloudflare:
-            logging.info('Retrying to bypass Cloudflare')
+            log.info('Retrying to bypass Cloudflare')
            time.sleep(0.3)
            return self.request_cloudflare(
                url, method,
@@ -286,8 +286,8 @@ class AternosConnect:
            sendtoken, retry - 1
        )

-        logging.debug('AternosConnect received: %s', req.text[:65])
-        logging.info(
+        log.debug('AternosConnect received: %s', req.text[:65])
+        log.info(
            '%s completed with %s status',
            method, req.status_code
        )

python_aternos/atjsparse.py
@@ -5,7 +5,6 @@ import abc
 import json
 import base64
 
-import logging
 import subprocess
 
 from pathlib import Path
@@ -16,6 +15,9 @@ import regex
 import js2py
 import requests
 
+from .atlog import log
+
+
 js: Optional['Interpreter'] = None
 
 
@@ -95,7 +97,7 @@ class NodeInterpreter(Interpreter):

        assert self.proc.stdout is not None
        ok_msg = self.proc.stdout.readline()
-        logging.debug('Received from server.js: %s', ok_msg)
+        log.debug('Received from server.js: %s', ok_msg)

    def exec_js(self, func: str) -> None:
        resp = requests.post(self.url, data=func)
@@ -104,7 +106,7 @@ class NodeInterpreter(Interpreter):
    def get_var(self, name: str) -> Any:
        resp = requests.post(self.url, data=name)
        resp.raise_for_status()
-        logging.debug('NodeJS response: %s', resp.content)
+        log.debug('NodeJS response: %s', resp.content)
        return json.loads(resp.content)

    def __del__(self) -> None:
@@ -112,7 +114,7 @@ class NodeInterpreter(Interpreter):
            self.proc.terminate()
            self.proc.communicate()
        except AttributeError:
-            logging.warning(
+            log.warning(
                'NodeJS process was not initialized'
            )

python_aternos/atlog.py (new file, 2 additions)
@@ -0,0 +1,2 @@
+import logging
+log = logging.getLogger('aternos')
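
Because the library now logs through this single named logger, an application embedding python-aternos can tune its verbosity independently of its own logging. A minimal, hypothetical consumer-side sketch (the level choices are only an example, not part of this commit):

import logging

# Keep the application's own logging at WARNING...
logging.basicConfig(level=logging.WARNING)

# ...but enable verbose output for everything logged under the
# 'aternos' logger that python_aternos/atlog.py creates.
logging.getLogger('aternos').setLevel(logging.DEBUG)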
python_aternos/atwss.py
@@ -4,7 +4,6 @@ for real-time information"""
 import enum
 import json
 import asyncio
-import logging
 
 from typing import Iterable
 from typing import Union, Any
@@ -14,10 +13,13 @@ from typing import TYPE_CHECKING
 
 import websockets
 
+from .atlog import log
+
 from .atconnect import REQUA
 
 if TYPE_CHECKING:
     from .atserver import AternosServer
 
 
 OneArgT = Callable[[Any], Coroutine[Any, Any, None]]
 TwoArgT = Callable[[Any, Tuple[Any, ...]], Coroutine[Any, Any, None]]
 FunctionT = Union[OneArgT, TwoArgT]
@@ -196,7 +198,7 @@ class AternosWss:
                continue

            if strm.stream:
-                logging.debug('Requesting %s stream', strm.stream)
+                log.debug('Requesting %s stream', strm.stream)
                await self.send({
                    'stream': strm.stream,
                    'type': 'start'
@@ -223,7 +225,7 @@ class AternosWss:
        """

        if self.socket is None:
-            logging.warning('Did you forget to call socket.connect?')
+            log.warning('Did you forget to call socket.connect?')
            await self.connect()

        assert self.socket is not None