Mirror of https://github.com/python/cpython.git
Issue #12180: Fixed a few remaining errors in test_packaging when no threading.
parent d2780aedce
commit 5eb555990a

5 changed files with 39 additions and 7 deletions
@@ -253,6 +253,15 @@ def create_distribution(configfiles=()):
     return d
 
 
+def fake_dec(*args, **kw):
+    """Fake decorator"""
+    def _wrap(func):
+        def __wrap(*args, **kw):
+            return func(*args, **kw)
+        return __wrap
+    return _wrap
+
+
 try:
     from test.support import skip_unless_symlink
 except ImportError:
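The hunk above (apparently the packaging test-support module, given that later hunks import `fake_dec` from `packaging.tests.support`) adds a decorator factory that accepts the same arguments as the real test-server decorators but leaves the wrapped function's behavior untouched. A minimal standalone sketch of what it does (the decorated function and its argument are hypothetical):

```python
def fake_dec(*args, **kw):
    """Fake decorator"""
    def _wrap(func):
        def __wrap(*args, **kw):
            return func(*args, **kw)
        return __wrap
    return _wrap

# Called like the real factories, e.g. use_pypi_server("test_found_links"),
# but the returned wrapper just forwards to the original function.
@fake_dec("some_server_fixture")  # hypothetical argument
def demo():
    return "unchanged behavior"

print(demo())  # -> unchanged behavior
```

Being callable at import time is the whole contract; the skip guards added further down keep the decorated tests from actually running without threads.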
@@ -6,13 +6,14 @@ from packaging import install
 from packaging.pypi.xmlrpc import Client
 from packaging.metadata import Metadata
 
-from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+from packaging.tests.support import (LoggingCatcher, TempdirManager, unittest,
+                                     fake_dec)
 try:
     import threading
     from packaging.tests.pypi_server import use_xmlrpc_server
 except ImportError:
     threading = None
-    use_xmlrpc_server = None
+    use_xmlrpc_server = fake_dec
 
 
 class InstalledDist:
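The point of swapping `None` for `fake_dec` is that decorators run at import time: with the old fallback, any `@use_xmlrpc_server(...)` line raised `TypeError` the moment the module was imported on a no-threads build, before any skip machinery could intervene. A hypothetical minimal reproduction of the old failure:

```python
use_xmlrpc_server = None  # the old fallback

try:
    @use_xmlrpc_server()  # decorator applied at import time
    def test_something(server):
        pass
except TypeError as exc:
    # On a no-threads build the module import itself would die here:
    # TypeError: 'NoneType' object is not callable
    print("import-time failure:", exc)
```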
@@ -7,12 +7,13 @@ from packaging.pypi.dist import (ReleaseInfo, ReleasesList, DistInfo,
 from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, requires_zlib
+from packaging.tests.support import TempdirManager, requires_zlib, fake_dec
 try:
     import threading
     from packaging.tests.pypi_server import use_pypi_server
 except ImportError:
-    threading = use_pypi_server = None
+    threading = None
+    use_pypi_server = fake_dec
 
 
 def Dist(*args, **kwargs):
@@ -10,9 +10,19 @@ import urllib.request
 from packaging.pypi.simple import Crawler
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, LoggingCatcher
-from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
-                                         PYPI_DEFAULT_STATIC_PATH)
+from packaging.tests.support import (TempdirManager, LoggingCatcher,
+                                     fake_dec)
+
+try:
+    import _thread
+    from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                             PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+    _thread = None
+    use_pypi_server = fake_dec
+    PYPI_DEFAULT_STATIC_PATH = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
 
 
 class SimpleCrawlerTestCase(TempdirManager,
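Two details stand out in this hunk: the probe is `_thread` (the C module that `threading` is built on, so its absence is the definitive no-threads signal), and `PYPI_DEFAULT_STATIC_PATH` gets a locally computed fallback so the name exists even when the threading-dependent `packaging.tests.pypi_server` module cannot be imported. A sketch of that fallback in isolation (run it as a file, since it relies on `__file__`):

```python
import os

try:
    import _thread  # absent on interpreters built without thread support
except ImportError:
    _thread = None

# Compute the static-files directory relative to this file instead of
# importing the constant from the threading-dependent pypi_server module.
PYPI_DEFAULT_STATIC_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'pypiserver')

print("threads:", _thread is not None, "static path:", PYPI_DEFAULT_STATIC_PATH)
```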
@@ -28,6 +38,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         return Crawler(server.full_address + base_url, *args,
                        **kwargs)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server()
     def test_bad_urls(self, server):
         crawler = Crawler()
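The remaining hunks in this file all add the same one-line guard above each server-backed test. A self-contained sketch of how the stacked decorators behave on a no-threads build (every name here is local to the demo; in the real suite `use_pypi_server` comes from `packaging.tests.pypi_server`):

```python
import unittest

_thread = None  # simulate an interpreter built without thread support

def fake_dec(*args, **kw):
    """Stand-in for use_pypi_server when threads are missing."""
    def _wrap(func):
        def __wrap(*args, **kw):
            return func(*args, **kw)
        return __wrap
    return _wrap

use_pypi_server = fake_dec

class DemoTestCase(unittest.TestCase):
    @unittest.skipIf(_thread is None, 'needs threads')
    @use_pypi_server("with_externals")
    def test_follow_externals(self, server=None):
        self.fail("never reached on a no-threads build")

if __name__ == '__main__':
    unittest.main()  # the test is reported as skipped: 'needs threads'
```

`skipIf` sits outermost, so the skip fires before the fake-decorated body is ever called, which is why `fake_dec` never needs to supply the `server` argument the real decorator injects.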
@@ -84,6 +95,7 @@ class SimpleCrawlerTestCase(TempdirManager,
                 'http://www.famfamfam.com/">')
         crawler._process_url(url, page)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("test_found_links")
     def test_found_links(self, server):
         # Browse the index, asking for a specified release version
@@ -139,6 +151,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertTrue(
             crawler._is_browsable("http://pypi.example.org/a/path"))
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_externals")
     def test_follow_externals(self, server):
         # Include external pages
@@ -149,6 +162,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn(server.full_address + "/external/external.html",
                       crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_real_externals")
     def test_restrict_hosts(self, server):
         # Only use a list of allowed hosts is possible
@@ -159,6 +173,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertNotIn(server.full_address + "/external/external.html",
                          crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_externals"],
                      static_uri_paths=["simple", "external"])
     def test_links_priority(self, server):
@@ -192,6 +207,7 @@ class SimpleCrawlerTestCase(TempdirManager,
                          releases[0].dists['sdist'].url['hashval'])
         self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_norel_links"],
                      static_uri_paths=["simple", "external"])
     def test_not_scan_all_links(self, server):
@@ -217,6 +233,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
                       crawler._processed_urls) # linked from external homepage (rel)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     def test_uses_mirrors(self):
         # When the main repository seems down, try using the given mirrors"""
         server = PyPIServer("foo_bar_baz")
@@ -314,6 +331,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn('http://example.org/some/simpleurl', found_links)
         self.assertIn('http://example.org/some/download', found_links)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("project_list")
     def test_search_projects(self, server):
         # we can search the index for some projects, on their names
@@ -161,6 +161,9 @@ Core and Builtins
 Library
 -------
 
+- Issue #12180: Fixed a few remaining errors in test_packaging when no
+  threading.
+
 - Issue #12175: RawIOBase.readall() now returns None if read() returns None.
 
 - Issue #12175: FileIO.readall() now raises a ValueError instead of an IOError