diff --git a/wpull/application/app.py b/wpull/application/app.py index 415e8dcb..1062523d 100644 --- a/wpull/application/app.py +++ b/wpull/application/app.py @@ -218,7 +218,7 @@ def _print_ssl_error(cls): '''Print an invalid SSL certificate warning.''' _logger.info(_('A SSL certificate could not be verified.')) _logger.info(_('To ignore and proceed insecurely, ' - 'use ‘--no-check-certificate’.')) + 'use \'--no-check-certificate\'.')) @classmethod def _print_crash_message(cls): diff --git a/wpull/application/hook.py b/wpull/application/hook.py index 6d2a5873..523630bd 100644 --- a/wpull/application/hook.py +++ b/wpull/application/hook.py @@ -150,7 +150,7 @@ def connect_plugin(self, plugin: WpullPlugin): _logger.debug('Connected hook %s %s', name, func) self.hook_dispatcher.connect(name, func) elif self.event_dispatcher.is_registered(name): - raise RuntimeError('Plugin event ‘{name}’ cannot be attached as a hook function.'.format(name=name)) + raise RuntimeError('Plugin event \'{name}\' cannot be attached as a hook function.'.format(name=name)) elif category == PluginFunctionCategory.event and self.event_dispatcher.is_registered(name): _logger.debug('Connected event %s %s', name, func) diff --git a/wpull/application/tasks/plugin.py b/wpull/application/tasks/plugin.py index 50f422dd..8496ecb1 100644 --- a/wpull/application/tasks/plugin.py +++ b/wpull/application/tasks/plugin.py @@ -118,7 +118,7 @@ def _connect_plugin_hooks(cls, session: AppSession, plugin_object: WpullPlugin): instance.connect_plugin(plugin_object) # TODO: raise error if any function is left unattached - # raise RuntimeError('Plugin function ‘{function_name}’ could not be attached to plugin hook/event ‘{name}’.'.format(name=name, function_name=func)) + # raise RuntimeError('Plugin function \'{function_name}\' could not be attached to plugin hook/event \'{name}\'.'.format(name=name, function_name=func)) @classmethod def _debug_log_registered_hooks(cls, session: AppSession): diff --git 
a/wpull/converter.py b/wpull/converter.py index 77afc465..6961c714 100644 --- a/wpull/converter.py +++ b/wpull/converter.py @@ -80,7 +80,7 @@ def convert_by_record(self, url_record): link_type = None _logger.info(__( - _('Converting links in file ‘{filename}’ (type={type}).'), + _('Converting links in file \'{filename}\' (type={type}).'), filename=filename, type=link_type )) diff --git a/wpull/pipeline/session.py b/wpull/pipeline/session.py index 38dc91e8..731e5fd3 100644 --- a/wpull/pipeline/session.py +++ b/wpull/pipeline/session.py @@ -57,7 +57,7 @@ def response(self, response: BaseResponse): def skip(self): '''Mark the item as processed without download.''' - _logger.debug(__(_('Skipping ‘{url}’.'), url=self.url_record.url)) + _logger.debug(__(_('Skipping \'{url}\'.'), url=self.url_record.url)) self.app_session.factory['URLTable'].check_in(self.url_record.url, Status.skipped) self._processed = True diff --git a/wpull/processor/base.py b/wpull/processor/base.py index faccfcbc..62e85de7 100644 --- a/wpull/processor/base.py +++ b/wpull/processor/base.py @@ -55,6 +55,6 @@ class BaseProcessorSession(object, metaclass=abc.ABCMeta): def _log_error(self, request, error): '''Log exceptions during a fetch.''' _logger.error( - _('Fetching ‘{url}’ encountered an error: {error}'), + _('Fetching \'{url}\' encountered an error: {error}'), url=request.url, error=error ) diff --git a/wpull/processor/coprocessor/phantomjs.py b/wpull/processor/coprocessor/phantomjs.py index fb1af12e..e26d71af 100644 --- a/wpull/processor/coprocessor/phantomjs.py +++ b/wpull/processor/coprocessor/phantomjs.py @@ -117,7 +117,7 @@ def process(self, item_session: ItemSession, request, response, file_writer_sess break else: _logger.warning(__( - _('PhantomJS failed to fetch ‘{url}’. I am sorry.'), + _('PhantomJS failed to fetch \'{url}\'. 
I am sorry.'), url=request.url_info.url )) @@ -184,7 +184,7 @@ def run(self): driver = self._phantomjs_driver_factory(params=driver_params) _logger.info(__( - _('PhantomJS fetching ‘{url}’.'), + _('PhantomJS fetching \'{url}\'.'), url=url )) @@ -213,7 +213,7 @@ def run(self): self._add_warc_snapshot(path, url) _logger.info(__( - _('PhantomJS fetched ‘{url}’.'), + _('PhantomJS fetched \'{url}\'.'), url=url )) diff --git a/wpull/processor/coprocessor/proxy.py b/wpull/processor/coprocessor/proxy.py index 0205498b..162dca06 100644 --- a/wpull/processor/coprocessor/proxy.py +++ b/wpull/processor/coprocessor/proxy.py @@ -120,7 +120,7 @@ def _client_request_callback(self, request: Request): if verdict: _logger.info(__( - _('Fetching ‘{url}’.'), + _('Fetching \'{url}\'.'), url=request.url_info.url )) @@ -144,7 +144,7 @@ def _server_end_response_callback(self, respoonse: Response): response = self._item_session.response _logger.info(__( - _('Fetched ‘{url}’: {status_code} {reason}. ' + _('Fetched \'{url}\': {status_code} {reason}. 
' 'Length: {content_length} [{content_type}].'), url=request.url, status_code=response.status_code, diff --git a/wpull/processor/coprocessor/youtubedl.py b/wpull/processor/coprocessor/youtubedl.py index e2d28f70..de448d6d 100644 --- a/wpull/processor/coprocessor/youtubedl.py +++ b/wpull/processor/coprocessor/youtubedl.py @@ -52,12 +52,12 @@ def process(self, item_session: ItemSession, request, response, file_writer_sess ) url = item_session.url_record.url - _logger.info(__(_('youtube-dl fetching ‘{url}’.'), url=url)) + _logger.info(__(_('youtube-dl fetching \'{url}\'.'), url=url)) with contextlib.closing(session): yield from session.run() - _logger.info(__(_('youtube-dl fetched ‘{url}’.'), url=url)) + _logger.info(__(_('youtube-dl fetched \'{url}\'.'), url=url)) class Session(object): diff --git a/wpull/processor/ftp.py b/wpull/processor/ftp.py index 6a327355..dabb9189 100644 --- a/wpull/processor/ftp.py +++ b/wpull/processor/ftp.py @@ -254,7 +254,7 @@ def _fetch(self, request: Request, is_file: bool): Coroutine. ''' - _logger.info(_('Fetching ‘{url}’.'), url=request.url) + _logger.info(_('Fetching \'{url}\'.'), url=request.url) self._item_session.request = request response = None @@ -366,7 +366,7 @@ def _add_listing_links(self, response: ListingResponse): def _log_response(self, request: Request, response: Response): '''Log response.''' _logger.info( - _('Fetched ‘{url}’: {reply_code} {reply_text}. ' + _('Fetched \'{url}\': {reply_code} {reply_text}. 
' 'Length: {content_length}.'), url=request.url, reply_code=response.reply.code, diff --git a/wpull/processor/web.py b/wpull/processor/web.py index 3ccc1ea2..95ea0982 100644 --- a/wpull/processor/web.py +++ b/wpull/processor/web.py @@ -201,7 +201,7 @@ def _process_robots(self): request)) except REMOTE_ERRORS as error: _logger.error( - _('Fetching robots.txt for ‘{url}’ ' + _('Fetching robots.txt for \'{url}\' ' 'encountered an error: {error}'), url=self._next_url_info.url, error=error ) @@ -260,7 +260,7 @@ def _fetch_one(self, request: Request) -> Tuple[bool, float]: Returns: If True, stop processing any future requests. ''' - _logger.info(_('Fetching ‘{url}’.'), url=request.url) + _logger.info(_('Fetching \'{url}\'.'), url=request.url) response = None @@ -391,7 +391,7 @@ def _add_post_data(self, request: Request): def _log_response(self, request: Request, response: Response): '''Log response.''' _logger.info( - _('Fetched ‘{url}’: {status_code} {reason}. ' + _('Fetched \'{url}\': {status_code} {reason}. 
' 'Length: {content_length} [{content_type}].'), url=request.url, status_code=response.status_code, diff --git a/wpull/scraper/css.py b/wpull/scraper/css.py index 969743a3..fe5ae75d 100644 --- a/wpull/scraper/css.py +++ b/wpull/scraper/css.py @@ -54,7 +54,7 @@ def scrape(self, request, response, link_type=None): except UnicodeError as error: _logger.warning(__( - _('Failed to read document at ‘{url}’: {error}'), + _('Failed to read document at \'{url}\': {error}'), url=request.url_info.url, error=error )) diff --git a/wpull/scraper/html.py b/wpull/scraper/html.py index a1672515..ec9666e1 100644 --- a/wpull/scraper/html.py +++ b/wpull/scraper/html.py @@ -110,7 +110,7 @@ def scrape(self, request, response, link_type=None): except (UnicodeError, self._html_parser.parser_error) as error: _logger.warning( - _('Failed to read document at ‘{url}’: {error}'), + _('Failed to read document at \'{url}\': {error}'), url=request.url_info.url, error=error ) result_meta_info = {} diff --git a/wpull/scraper/javascript.py b/wpull/scraper/javascript.py index 2a4b54e0..4c3dac6e 100644 --- a/wpull/scraper/javascript.py +++ b/wpull/scraper/javascript.py @@ -78,7 +78,7 @@ def scrape(self, request, response, link_type=None): except UnicodeError as error: _logger.warning( - _('Failed to read document at ‘{url}’: {error}'), + _('Failed to read document at \'{url}\': {error}'), url=request.url_info.url, error=error ) diff --git a/wpull/scraper/sitemap.py b/wpull/scraper/sitemap.py index 9f6c0212..06465b9a 100644 --- a/wpull/scraper/sitemap.py +++ b/wpull/scraper/sitemap.py @@ -39,7 +39,7 @@ def scrape(self, request, response, link_type=None): except (UnicodeError, self._html_parser.parser_error) as error: _logger.warning( - _('Failed to read document at ‘{url}’: {error}'), + _('Failed to read document at \'{url}\': {error}'), url=request.url_info.url, error=error ) diff --git a/wpull/scraper/util.py b/wpull/scraper/util.py index 791fb885..f05759c5 100644 --- a/wpull/scraper/util.py +++ 
b/wpull/scraper/util.py @@ -76,7 +76,7 @@ def urljoin_safe(base_url, url, allow_fragments=True): ) except ValueError as error: _logger.warning(__( - _('Unable to parse URL ‘{url}’: {error}.'), + _('Unable to parse URL \'{url}\': {error}.'), url=url, error=error )) diff --git a/wpull/url.py b/wpull/url.py index 1d61c2a3..996af5ee 100644 --- a/wpull/url.py +++ b/wpull/url.py @@ -406,7 +406,7 @@ def parse_url_or_log(url, encoding='utf-8'): url_info = URLInfo.parse(url, encoding=encoding) except ValueError as error: _logger.warning(__( - _('Unable to parse URL ‘{url}’: {error}.'), + _('Unable to parse URL \'{url}\': {error}.'), url=wpull.string.printable_str(url), error=error)) else: return url_info