Bug#1064752: intake: FTBFS: dh_auto_test: error: pybuild --test --test-

    From Lucas Nussbaum@21:1/5 to All on Sun Feb 25 21:00:18 2024
    Source: intake
    Version: 0.6.6-3
    Severity: serious
    Justification: FTBFS
    Tags: trixie sid ftbfs
    User: lucas@debian.org
    Usertags: ftbfs-20240224 ftbfs-trixie

    Hi,

    During a rebuild of all packages in sid, your package failed to build
    on amd64.
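
    Skimming the log below, the failures fall into two groups: test_load_textfile
    crashes because cache.load() returns None, so indexing the result raises
    TypeError; every other failure reduces to dask resolving the glob
    entry1_*.csv to no files, which the intake server then surfaces as an
    HTTP 400. The second symptom is easy to reproduce in isolation (the path
    here is hypothetical; any pattern matching nothing behaves the same):

        import dask.dataframe as dd

        # When the glob matches no files, dask raises
        # OSError("... resolved to no files"); recent dask re-raises it
        # through the backend dispatcher, as seen in the log below.
        dd.read_csv("/nonexistent/entry1_*.csv")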


    Relevant part (hopefully):
    make[1]: Entering directory '/<<PKGBUILDDIR>>'
    dh_install
    # For some reason, not all data is movable via install or dh_install, so
    # just force copy (and overwrite if needed) for pytest to actually work
    set -e \
    ; for py in `py3versions -sv` \
    ; do builddir=".pybuild/cpython3_${py}_intake/build" \
    ; cp -a intake/source/tests ${builddir}/intake/source \
    ; cp -a intake/catalog/tests ${builddir}/intake/catalog \
    ; cp -a intake/interface/tests ${builddir}/intake/interface \
    ; done
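
    (For reference, the copy loop above is equivalent to this Python sketch;
    py3versions -sv prints the supported interpreter versions, e.g. "3.12".)

        import shutil
        import subprocess

        # Copy each tests/ tree into the per-interpreter build directory,
        # overwriting if present, so pytest can find the test data.
        versions = subprocess.run(["py3versions", "-sv"], capture_output=True,
                                  text=True, check=True).stdout.split()
        for py in versions:
            builddir = f".pybuild/cpython3_{py}_intake/build"
            for sub in ("source", "catalog", "interface"):
                shutil.copytree(f"intake/{sub}/tests",
                                f"{builddir}/intake/{sub}/tests",
                                dirs_exist_ok=True)
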
    PYBUILD_SYSTEM=custom \
    PYBUILD_TEST_ARGS='cd {build_dir}; PATH=/<<PKGBUILDDIR>>/debian/{package}/usr/bin:/<<PKGBUILDDIR>>/debian/{package}/usr/lib:/<<PKGBUILDDIR>>/debian/{package}/build/intake:$PATH {interpreter} -m pytest' \
    dh_auto_test --buildsystem=pybuild
    I: pybuild base:305: cd /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build; PATH=/<<PKGBUILDDIR>>/debian/python3-intake/usr/bin:/<<PKGBUILDDIR>>/debian/python3-intake/usr/lib:/<<PKGBUILDDIR>>/debian/python3-intake/build/intake:$PATH python3.12 -m pytest
    ============================= test session starts ==============================
    platform linux -- Python 3.12.2, pytest-7.4.4, pluggy-1.4.0
    rootdir: /<<PKGBUILDDIR>>
    collected 427 items / 10 skipped

    intake/auth/tests/test_auth.py ...... [ 1%]
    intake/catalog/tests/test_alias.py .. [ 1%]
    intake/catalog/tests/test_auth_integration.py .. [ 2%]
    intake/catalog/tests/test_caching_integration.py ...F.............. [ 6%]
    intake/catalog/tests/test_catalog_save.py . [ 6%]
    intake/catalog/tests/test_core.py .. [ 7%]
    intake/catalog/tests/test_default.py . [ 7%]
    intake/catalog/tests/test_discovery.py .. [ 7%]
    intake/catalog/tests/test_gui.py ..s..s [ 9%]
    intake/catalog/tests/test_local.py ..F.................................. [ 18%]
    ......................................................ssssss [ 32%]
    intake/catalog/tests/test_parameters.py .............. [ 35%]
    intake/catalog/tests/test_persist.py .s [ 35%]
    intake/catalog/tests/test_reload_integration.py .... [ 36%]
    intake/catalog/tests/test_remote_integration.py F...F..F.F.FFFFFF.F..... [ 42%]
    ... [ 43%]
    intake/catalog/tests/test_utils.py ............. [ 46%]
    intake/catalog/tests/test_zarr.py ... [ 46%]
    intake/cli/client/tests/test_cache.py ...... [ 48%]
    intake/cli/client/tests/test_conf.py ..... [ 49%]
    intake/cli/client/tests/test_local_integration.py .....FF.. [ 51%]
    intake/cli/server/tests/test_serializer.py sss......... [ 54%]
    intake/cli/server/tests/test_server.py ..FFF.ss.. [ 56%]
    intake/cli/tests/test_util.py ........ [ 58%]
    intake/container/tests/test_generics.py . [ 58%]
    intake/container/tests/test_persist.py ...s [ 59%]
    intake/interface/tests/test_init_gui.py ..s [ 60%]
    intake/source/tests/test_base.py ....................... [ 65%]
    intake/source/tests/test_cache.py ...............s [ 69%]
    intake/source/tests/test_csv.py ...........s.. [ 72%]
    intake/source/tests/test_derived.py ...F [ 73%]
    intake/source/tests/test_discovery.py ..... [ 74%]
    intake/source/tests/test_json.py ..................... [ 79%]
    intake/source/tests/test_npy.py ........... [ 82%]
    intake/source/tests/test_text.py ................FF [ 86%]
    intake/source/tests/test_utils.py ................................ [ 94%]
    intake/tests/test_config.py ......... [ 96%]
    intake/tests/test_top_level.py ......s... [ 98%]
    intake/tests/test_utils.py ...... [100%]

    =================================== FAILURES ===================================
    ______________________________ test_load_textfile ______________________________

    catalog_cache = <Intake catalog: catalog_caching>

    def test_load_textfile(catalog_cache):
        cat = catalog_cache['text_cache']
        cache = cat.cache[0]

        cache_paths = cache.load(cat._urlpath, output=False)
        cache_path = cache_paths[-1]
    E TypeError: 'NoneType' object is not subscriptable

    intake/catalog/tests/test_caching_integration.py:53: TypeError
    _________________________________ test_nested __________________________________

    args = ('/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv',)
    kwargs = {'storage_options': None}
    func = <function make_reader.<locals>.read at 0x7ff50e4e9f80>

    @wraps(fn)
    def wrapper(*args, **kwargs):
        func = getattr(self, dispatch_name)
        try:
            return func(*args, **kwargs)

    /usr/lib/python3/dist-packages/dask/backends.py:136:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    /usr/lib/python3/dist-packages/dask/dataframe/io/csv.py:763: in read
    return read_pandas(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    reader = <function read_csv at 0x7ff50f5aaf20>
    urlpath = '/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv'
    blocksize = 'default', lineterminator = '\n', compression = 'infer'
    sample = 256000, sample_rows = 10, enforce = False, assume_missing = False
    storage_options = None, include_path_column = False, kwargs = {}
    reader_name = 'read_csv', kw = 'chunksize', lastskiprow = 0, firstrow = 0
    path_converter = None, paths = []

    def read_pandas(
        reader,
        urlpath,
        blocksize="default",
        lineterminator=None,
        compression="infer",
        sample=256000,
        sample_rows=10,
        enforce=False,
        assume_missing=False,
        storage_options=None,
        include_path_column=False,
        **kwargs,
    ):
        reader_name = reader.__name__
        if lineterminator is not None and len(lineterminator) == 1:
            kwargs["lineterminator"] = lineterminator
        else:
            lineterminator = "\n"
        if include_path_column and isinstance(include_path_column, bool):
            include_path_column = "path"
        if "index" in kwargs or (
            "index_col" in kwargs and kwargs.get("index_col") is not False
        ):
            raise ValueError(
                "Keywords 'index' and 'index_col' not supported, except for "
                "'index_col=False'. Use dd.{reader_name}(...).set_index('my-index') instead"
            )
        for kw in ["iterator", "chunksize"]:
            if kw in kwargs:
                raise ValueError(f"{kw} not supported for dd.{reader_name}")
        if kwargs.get("nrows", None):
            raise ValueError(
                "The 'nrows' keyword is not supported by "
                "`dd.{0}`. To achieve the same behavior, it's "
                "recommended to use `dd.{0}(...)."
                "head(n=nrows)`".format(reader_name)
            )
        if isinstance(kwargs.get("skiprows"), int):
            lastskiprow = firstrow = kwargs.get("skiprows")
        elif kwargs.get("skiprows") is None:
            lastskiprow = firstrow = 0
        else:
            # When skiprows is a list, we expect more than max(skiprows) to
            # be included in the sample. This means that [0,2] will work well,
            # but [0, 440] might not work.
            skiprows = set(kwargs.get("skiprows"))
            lastskiprow = max(skiprows)
            # find the firstrow that is not skipped, for use as header
            firstrow = min(set(range(len(skiprows) + 1)) - set(skiprows))
        if isinstance(kwargs.get("header"), list):
            raise TypeError(f"List of header rows not supported for dd.{reader_name}")
        if isinstance(kwargs.get("converters"), dict) and include_path_column:
            path_converter = kwargs.get("converters").get(include_path_column, None)
        else:
            path_converter = None

        # If compression is "infer", inspect the (first) path suffix and
        # set the proper compression option if the suffix is recongnized.
        if compression == "infer":
            # Translate the input urlpath to a simple path list
            paths = get_fs_token_paths(urlpath, mode="rb", storage_options=storage_options)[
                2
            ]

            # Check for at least one valid path
            if len(paths) == 0:
                raise OSError(f"{urlpath} resolved to no files")
    E OSError: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    /usr/lib/python3/dist-packages/dask/dataframe/io/csv.py:535: OSError

    The above exception was the direct cause of the following exception:

    catalog1 = <Intake catalog: name_in_cat>

    def test_nested(catalog1):
        assert 'nested' in catalog1
        assert 'entry1' in catalog1.nested.nested()
        assert catalog1.entry1.read().equals(catalog1.nested.nested.entry1.read())

    intake/catalog/tests/test_local.py:86:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    intake/source/csv.py:129: in read
    self._get_schema()
    intake/source/csv.py:115: in _get_schema
    self._open_dataset(urlpath)
    intake/source/csv.py:94: in _open_dataset
    self._dataframe = dask.dataframe.read_csv(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    args = ('/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv',)
    kwargs = {'storage_options': None}
    func = <function make_reader.<locals>.read at 0x7ff50e4e9f80>

    @wraps(fn)
    def wrapper(*args, **kwargs):
        func = getattr(self, dispatch_name)
        try:
            return func(*args, **kwargs)
        except Exception as e:
            raise type(e)(
                f"An error occurred while calling the {funcname(func)} "
                f"method registered to the {self.backend} backend.\n"
                f"Original Message: {e}"
            ) from e
    E OSError: An error occurred while calling the read_csv method registered to the pandas backend.
    E Original Message: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    /usr/lib/python3/dist-packages/dask/backends.py:138: OSError
    ______________________________ test_info_describe ______________________________

    intake_server = 'intake://localhost:7483'

    def test_info_describe(intake_server):
        catalog = open_catalog(intake_server)

        assert_items_equal(list(catalog), ['use_example1', 'nested', 'entry1',
                                           'entry1_part', 'remote_env',
                                           'local_env', 'text', 'arr', 'datetime'])

        info = catalog['entry1'].describe()

    intake/catalog/tests/test_remote_integration.py:29:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    intake/catalog/base.py:436: in __getitem__
    s = self._get_entry(key)
    intake/catalog/utils.py:45: in wrapper
    return f(self, *args, **kwargs)
    intake/catalog/base.py:323: in _get_entry
    return entry()
    intake/catalog/entry.py:77: in __call__
    s = self.get(**kwargs)
    intake/catalog/remote.py:459: in get
    return open_remote(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    url = 'http://localhost:7483/', entry = 'entry1', container = 'dataframe'
    user_parameters = {}, description = 'entry1 full', http_args = {'headers': {}}
    page_size = None, persist_mode = 'default'
    auth = <intake.auth.base.BaseClientAuth object at 0x7ff50c8c93a0>, getenv = True
    getshell = True

    def open_remote(url, entry, container, user_parameters, description, http_args,
                    page_size=None, persist_mode=None, auth=None, getenv=None, getshell=None):
        """Create either local direct data source or remote streamed source"""
        from intake.container import container_map
        import msgpack
        import requests
        from requests.compat import urljoin

        if url.startswith('intake://'):
            url = url[len('intake://'):]
        payload = dict(action='open',
                       name=entry,
                       parameters=user_parameters,
                       available_plugins=list(plugin_registry))
        req = requests.post(urljoin(url, 'v1/source'),
                            data=msgpack.packb(payload, **pack_kwargs),
                            **http_args)
        if req.ok:
            response = msgpack.unpackb(req.content, **unpack_kwargs)

            if 'plugin' in response:
                pl = response['plugin']
                pl = [pl] if isinstance(pl, str) else pl
                # Direct access
                for p in pl:
                    if p in plugin_registry:
                        source = plugin_registry[p](**response['args'])
                        proxy = False
                        break
                else:
                    proxy = True
            else:
                proxy = True
            if proxy:
                response.pop('container')
                response.update({'name': entry, 'parameters': user_parameters})
                if container == 'catalog':
                    response.update({'auth': auth,
                                     'getenv': getenv,
                                     'getshell': getshell,
                                     'page_size': page_size,
                                     'persist_mode': persist_mode
                                     # TODO ttl?
                                     # TODO storage_options?
                                     })
                source = container_map[container](url, http_args, **response)
            source.description = description
            return source
        else:
            raise Exception('Server error: %d, %s' % (req.status_code, req.reason))
    E Exception: Server error: 400, An error occurred while calling the read_csv method registered to the pandas backend. Original Message: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    intake/catalog/remote.py:519: Exception
    ---------------------------- Captured stderr setup -----------------------------
    2024-02-24 22:52:23,664 - intake - INFO - __main__.py:main:L53 - Creating catalog from:
    2024-02-24 22:52:23,664 - intake - INFO - __main__.py:main:L55 - - /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests/catalog1.yml
    2024-02-24 22:52:23,970 - intake - INFO - __main__.py:main:L62 - catalog_args: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests/catalog1.yml
    2024-02-24 22:52:23,970 - intake - INFO - __main__.py:main:L70 - Listening on localhost:7483
    ----------------------------- Captured stderr call -----------------------------
    Traceback (most recent call last):
      File "/usr/lib/python3/dist-packages/dask/backends.py", line 136, in wrapper
        return func(*args, **kwargs)
               ^^^^^^^^^^^^^^^^^^^^^
      File "/usr/lib/python3/dist-packages/dask/dataframe/io/csv.py", line 763, in read
        return read_pandas(
               ^^^^^^^^^^^^
      File "/usr/lib/python3/dist-packages/dask/dataframe/io/csv.py", line 535, in read_pandas
        raise OSError(f"{urlpath} resolved to no files")
    OSError: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    The above exception was the direct cause of the following exception:

    Traceback (most recent call last):
      File "/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/cli/server/server.py", line 306, in post
        source.discover()
      File "/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/source/base.py", line 347, in discover
        self._load_metadata()
      File "/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/source/base.py", line 285, in _load_metadata
        self._schema = self._get_schema()
                       ^^^^^^^^^^^^^^^^^^
      File "/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/source/csv.py", line 115, in _get_schema
        self._open_dataset(urlpath)
      File "/<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/source/csv.py", line 94, in _open_dataset
        self._dataframe = dask.dataframe.read_csv(
                          ^^^^^^^^^^^^^^^^^^^^^^^^
      File "/usr/lib/python3/dist-packages/dask/backends.py", line 138, in wrapper
        raise type(e)(
    OSError: An error occurred while calling the read_csv method registered to the pandas backend.
    Original Message: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files
    400 POST /v1/source (127.0.0.1): Discover failed
    400 POST /v1/source (127.0.0.1) 159.88ms
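
    All remote_integration failures below share this root cause surfacing
    through the server: the POST to /v1/source makes the server run
    source.discover() -> _load_metadata() -> _get_schema() ->
    dask.dataframe.read_csv(), which hits the empty glob and answers 400,
    so the client's open_remote() raises "Server error: 400, ...". A sketch
    of the client-side trigger (server URL taken from the log):

        from intake import open_catalog

        # Any entry access forces server-side discover(); that is where the
        # empty-glob OSError becomes the 400 reported back to the client.
        catalog = open_catalog("intake://localhost:7483")
        catalog["entry1"]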
    ______________________________ test_remote_direct ______________________________

    intake_server = 'intake://localhost:7483'

    def test_remote_direct(intake_server):
        from intake.container.dataframe import RemoteDataFrame
        catalog = open_catalog(intake_server)
        s0 = catalog.entry1()

    intake/catalog/tests/test_remote_integration.py:74:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    intake/catalog/base.py:391: in __getattr__
    return self[item]  # triggers reload_on_change
    intake/catalog/base.py:436: in __getitem__
    s = self._get_entry(key)
    intake/catalog/utils.py:45: in wrapper
    return f(self, *args, **kwargs)
    intake/catalog/base.py:323: in _get_entry
    return entry()
    intake/catalog/entry.py:77: in __call__
    s = self.get(**kwargs)
    intake/catalog/remote.py:459: in get
    return open_remote(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    url = 'http://localhost:7483/', entry = 'entry1', container = 'dataframe'
    user_parameters = {}, description = 'entry1 full', http_args = {'headers': {}}
    page_size = None, persist_mode = 'default'
    auth = <intake.auth.base.BaseClientAuth object at 0x7ff50c8dc1d0>, getenv = True
    getshell = True

    [open_remote listing identical to the one shown in test_info_describe above]
    E Exception: Server error: 400, An error occurred while calling the read_csv method registered to the pandas backend. Original Message: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    intake/catalog/remote.py:519: Exception
    ----------------------------- Captured stderr call -----------------------------
    [captured stderr identical to the test_info_describe failure above]
    400 POST /v1/source (127.0.0.1): Discover failed
    400 POST /v1/source (127.0.0.1) 2.71ms
    _______________________ test_remote_datasource_interface _______________________

    intake_server = 'intake://localhost:7483'

    def test_remote_datasource_interface(intake_server):
        catalog = open_catalog(intake_server)

        d = catalog['entry1']

    intake/catalog/tests/test_remote_integration.py:101:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    intake/catalog/base.py:436: in __getitem__
    s = self._get_entry(key)
    intake/catalog/utils.py:45: in wrapper
    return f(self, *args, **kwargs)
    intake/catalog/base.py:323: in _get_entry
    return entry()
    intake/catalog/entry.py:77: in __call__
    s = self.get(**kwargs)
    intake/catalog/remote.py:459: in get
    return open_remote(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    url = 'http://localhost:7483/', entry = 'entry1', container = 'dataframe'
    user_parameters = {}, description = 'entry1 full', http_args = {'headers': {}}
    page_size = None, persist_mode = 'default'
    auth = <intake.auth.base.BaseClientAuth object at 0x7ff50c8dc2c0>, getenv = True
    getshell = True

    [open_remote listing identical to the one shown in test_info_describe above]
    E Exception: Server error: 400, An error occurred while calling the read_csv method registered to the pandas backend. Original Message: /<<PKGBUILDDIR>>/.pybuild/cpython3_3.12_intake/build/intake/catalog/tests//entry1_*.csv resolved to no files

    intake/catalog/remote.py:519: Exception
    ----------------------------- Captured stderr call -----------------------------
    [captured stderr identical to the test_info_describe failure above]
    400 POST /v1/source (127.0.0.1): Discover failed
    400 POST /v1/source (127.0.0.1) 2.43ms
    __________________________________ test_read ___________________________________

    intake_server = 'intake://localhost:7483'

    def test_read(intake_server):
        catalog = open_catalog(intake_server)

        d = catalog['entry1']

    intake/catalog/tests/test_remote_integration.py:116:
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
    intake/catalog/base.py:436: in __getitem__
    s = self._get_entry(key)
    intake/catalog/utils.py:45: in wrapper
    return f(self, *args, **kwargs)
    intake/catalog/base.py:323: in _get_entry
    return entry()
    intake/catalog/entry.py:77: in __call__
    s = self.get(**kwargs)
    intake/catalog/remote.py:459: in get
    return open_remote(
    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    url = 'http://localhost:7483/', entry = 'entry1', container = 'dataframe'
    user_parameters = {}, description = 'entry1 full', http_args = {'headers': {}}
    page_size = None, persist_mode = 'default'
    auth = <intake.auth.base.BaseClientAuth object at 0x7ff50c8dc200>, getenv = True
    getshell = True

    [open_remote listing identical to the one shown in test_info_describe above]

    [continued in next message]
