HES Augmented Care Periods (HESAPC_ACP) Dataset
1. Summary
This dataset is a subset of the HES Admitted Patient Care (APC) dataset and was retired in 2006. It includes information about patients admitted to intensive care settings.
| Dataset Descriptor | Dataset-specific Information |
|---|---|
| Name of Dataset in TRE | NHSE_HESAPC_ACP |
| Citation (APA) | Data Services, NHS England (NHSE). (2024). NHS England: Hospital Episode Statistics (HES) Augmented Care Periods (ACP) Dataset. UK Longitudinal Linkage Collaboration (UK LLC). https://doi.org/10.71760/ukllc-dataset-00365-03 |
| Download Citation | Citeproc JSON, BibTeX, RIS |
| Series | NHS England |
| Owner | NHS England |
| Temporal Coverage in the TRE | 04/1997 - 12/2006 |
| Geographical Coverage | England |
| Participant Count | 3102 |
| Number of Variables | 123 |
| Number of Observations | 3277 |
| Key Link | https://digital.nhs.uk/data-and-information/data-tools-and-services/data-services/hospital-episode-statistics |
| Keywords | Inpatients, NHS hospitals |
| Latest Extract Date | 2024-04-26 |
| Specific Restrictions to Data Use | None |
| Build a Data Request | https://explore.ukllc.ac.uk/ |
2. Metrics
Table 2: Participants from each LPS represented in the HESAPC_ACP dataset in the UK LLC TRE. Note: individual cohort counts of less than 10 are suppressed (shown as <10) and excluded from the dataset's total participant count.
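The suppression rule described in the note above can be illustrated with a short sketch. This is not UK LLC's own disclosure-control code; the LPS names and counts below are invented for illustration only.

```python
import pandas as pd

# Illustrative LPS participant counts (not real UK LLC figures)
counts = pd.DataFrame({
    "lps": ["LPS A", "LPS B", "LPS C"],
    "participants": [1200, 7, 1895],
})

# Counts below 10 are displayed as "<10" and excluded from the
# dataset-level total participant count.
counts["display"] = counts["participants"].apply(
    lambda n: "<10" if n < 10 else str(n)
)
total = counts.loc[counts["participants"] >= 10, "participants"].sum()

print(counts[["lps", "display"]])
print("Total participants (suppressed counts excluded):", total)
```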
3. Version History
| Version | 1 | 2 | 3 |
|---|---|---|---|
| Name in TRE | NHSE_HESAPC_ACP | NHSE_HESAPC_ACP | NHSE_HESAPC_ACP |
| Version Date | 21 Dec 2022 | 13 Apr 2023 | 26 Apr 2024 |
| Participant Count | N/A | N/A | N/A |
| Number of Variables | 123 | 123 | 123 |
| Number of Observations | 2766 | 3053 | 3277 |
| DOI | 10.83126/ukllc-dataset-00040-01 | 10.83126/ukllc-dataset-00040-02 | 10.83126/ukllc-dataset-00040-03 |
| Change Log | 10.83126/ukllc-dataset-00040-01/activities | 10.83126/ukllc-dataset-00040-02/activities | 10.83126/ukllc-dataset-00040-03/activities |
4. Documentation
We are currently building a documentation storage system which will host useful documents related to datasets and data owners. We will surface these documents on Guidebook.
5. Useful Syntax
Below we will include syntax that may be helpful to other researchers in the UK LLC TRE. For longer scripts, we will include a snippet of the code plus a link to the UK LLC GitHub repository where you can find the full scripts.
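As an example of the kind of syntax that might appear here, the sketch below shows how a researcher could pull a sample of this dataset from within the UK LLC TRE, assuming it is exposed as a SQL Server table named NHSE_HESAPC_ACP and accessed over ODBC. The driver, server and database names are placeholders; use the connection details supplied inside the TRE for your project.

```python
import pyodbc
import pandas as pd

# Hypothetical connection details -- replace the server and database
# placeholders with the values provided for your project inside the TRE.
conn = pyodbc.connect(
    "DRIVER={ODBC Driver 17 for SQL Server};"
    "SERVER=<tre_server>;DATABASE=<project_database>;Trusted_Connection=yes;"
)

# Pull a small sample of the HESAPC_ACP table to inspect its structure.
# The table name follows the "Name of Dataset in TRE" above, but the
# schema visible to your project may differ.
query = "SELECT TOP 100 * FROM NHSE_HESAPC_ACP"
df = pd.read_sql(query, conn)

print(df.shape)
print(df.columns.tolist())
```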