Directory    : /opt/cloudlinux/venv/lib/python3.11/site-packages/clwpos/user/website_check/
Current File : //opt/cloudlinux/venv/lib/python3.11/site-packages/clwpos/user/website_check/checkers.py
# Copyright © Cloud Linux GmbH & Cloud Linux Software, Inc 2010-2020 All Rights Reserved
#
# Licensed under CLOUD LINUX LICENSE AGREEMENT
# http://cloudlinux.com/docs/LICENSE.TXT
import logging
import os
import time
from contextlib import contextmanager
from typing import Optional

import requests
from requests import ConnectionError, Timeout
from requests.exceptions import SSLError, RequestException, ChunkedEncodingError
import urllib3
from urllib3.exceptions import ReadTimeoutError

from .errors import (
    PhpLogErrorsFound,
    WebsiteCheckBadHttpCode,
    WebsiteTimeout,
    WebsiteHttpsBroken,
    WebsiteNotResponding,
    PostCheckRequestException
)

# a long timeout is set because the tested
# sites may be really slow
TIMEOUT: int = 10
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.13) '
                  'Gecko/20101209 CentOS/3.6-2.el5.centos Firefox/3.6.13'
}
ERROR_LOGFILE_NAME = "error_log"
ERROR_PATTERNS = [
    "PHP Fatal error",
    # not all php errors are marked
    # as fatal in logs, see tests for examples
    "Stack trace"
]


def retry_on_exceptions(max_retries, exceptions_to_retry):
    """
    Decorator to retry the wrapped function on specific exceptions
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            retries = 0
            exception = ValueError(f'Request to website failed even after {max_retries} retries.')
            while retries < max_retries:
                try:
                    return func(*args, **kwargs)
                except tuple(exceptions_to_retry) as e:
                    retries += 1
                    logging.warning('Retry to request website, exception: %s', str(e))
                    exception = e
                    time.sleep(1)  # wait for 1 second before retrying
            raise exception
        return wrapper
    return decorator


@retry_on_exceptions(3, [ChunkedEncodingError])
def _request_url(url):
    """
    Retry on:
     - ChunkedEncodingError -> the error sometimes happens due to network issues/glitches
    """
    response = requests.get(url, timeout=TIMEOUT, verify=False, headers=HEADERS)
    return response


def _make_request(url: str) -> Optional[str]:
    """
    Checks that the website response http code is in the range 200-400 and returns.
    Otherwise, raises a human-readable error.
    :param url: website url represented as "protocol://domain.zone/wp_installation_path"
    """
    try:
        response = _request_url(url)
        if 200 <= response.status_code < 400:
            return
        raise WebsiteCheckBadHttpCode(response.url, response.status_code)
    except Timeout:
        raise WebsiteTimeout(url, timeout=TIMEOUT)
    except SSLError as e:
        raise WebsiteHttpsBroken(url, details=str(e))
    except ConnectionError as e:
        # really strange behavior of requests, which wraps
        # errors inside of ConnectionError
        if e.args and isinstance(e.args[0], ReadTimeoutError):
            raise WebsiteTimeout(url, timeout=TIMEOUT)
        raise WebsiteNotResponding(url, details=str(e))
    # general request error or ValueError (raised by the retry decorator)
    except (RequestException, ValueError) as e:
        raise PostCheckRequestException(url, error=str(e))
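
# --- Illustrative sketch, not part of the original module -------------------
# The retry decorator above is generic and can wrap any flaky callable.  The
# helper below is a hypothetical example (its name and exception list are
# assumptions) showing a variant of _request_url() that retries on read
# timeouts as well as on chunked-encoding glitches.
@retry_on_exceptions(3, [ChunkedEncodingError, Timeout])
def _example_request_with_timeout_retry(url):
    # same request as _request_url(), but Timeout is also retried
    return requests.get(url, timeout=TIMEOUT, verify=False, headers=HEADERS)
# -----------------------------------------------------------------------------
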
@contextmanager
def check_error_log(abs_wp_path: str):
    """
    Looks into {doc_root}/public_html and searches for any php errors found.
    As the log can be really big, this context manager captures the log size at the
    start point and then analyzes only the part which was added during context execution.
    :param abs_wp_path: path to the root directory of the wordpress installation
    """
    error_log_path = os.path.join(abs_wp_path, ERROR_LOGFILE_NAME)
    if os.path.exists(error_log_path):
        old_stat_result = os.stat(error_log_path)
    else:
        # the case when the log was removed for some reason
        old_stat_result = None

    yield

    # log file still does not exist, nothing to check
    if not os.path.exists(error_log_path):
        logging.warning('%s does not exist, nothing to analyze in log file', error_log_path)
        return

    cur_stat_result = os.stat(error_log_path)
    with open(error_log_path, "r") as f:
        # log file was not rotated, skip the beginning
        if old_stat_result \
                and old_stat_result.st_ino == cur_stat_result.st_ino \
                and old_stat_result.st_size <= cur_stat_result.st_size:
            f.seek(old_stat_result.st_size)

        # "[" is not a misprint:
        # the logfile consists of records that start with date and time surrounded by []:
        # [2021 01 01 19:86] text
        #     text text multiline
        # [2021 01 01 19:86] another message
        # so what I'm trying to do here is split the file not by lines, but by log records;
        # it might not work in some cases, but should be fine in 99% of them
        for log_record in f.read().split('\n['):
            if not any((error_pattern in log_record for error_pattern in ERROR_PATTERNS)):
                continue
            raise PhpLogErrorsFound(error_log_path, log_record)


def check_site_status_code(domain: str, wp_path: str) -> Optional[str]:
    """
    Checks the website to decide whether the AccelerateWP installation was successful.
    :param domain: domain name represented as "domain.zone";
                   the protocol will be added automatically
    :param wp_path: path to the wordpress installation instance
    :raises: WebsiteCheckError and subclasses
    """
    # if the certificate is bad, but the site itself works,
    # we consider it ok for WPOS.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    try:
        http_url = 'http://' + domain + '/' + wp_path
        _make_request(http_url)
    except WebsiteNotResponding:
        # Some websites do not enable HTTP to HTTPS redirection.
        # Try connecting with the HTTPS protocol.
        https_url = 'https://' + domain + '/' + wp_path
        _make_request(https_url)
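
# --- Illustrative sketch, not part of the original module -------------------
# Shows how the two public checks are meant to be combined: check_error_log()
# inspects only the log records appended while the "with" block runs, so the
# HTTP check is performed inside it.  The function name, domain, wordpress
# path and document root below are assumptions made for this example.
def _example_post_install_check():
    try:
        with check_error_log('/home/user/public_html/blog'):
            check_site_status_code('example.com', 'blog')
    except (WebsiteCheckBadHttpCode, WebsiteTimeout, WebsiteHttpsBroken,
            WebsiteNotResponding, PostCheckRequestException, PhpLogErrorsFound) as exc:
        logging.error('Post-install website check failed: %s', exc)
    else:
        logging.info('Website responded and no new PHP errors were logged to error_log.')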