1010import json
1111import os
1212import subprocess
13+ import threading
1314import time
1415
1516import requests
7172# ── Pagination helpers ────────────────────────────────────────────────────────
7273_SKIP_LIST_KEYS = frozenset ({"schemas" })
7374
75+ # Server-side state for background "Load All" pagination
76+ _load_all_state : Dict [str , Any ] = {
77+ "running" : False ,
78+ "pages" : 0 ,
79+ "total_items" : 0 ,
80+ "items" : [],
81+ "done" : False ,
82+ "error" : None ,
83+ "list_key" : None ,
84+ "initial_data" : {},
85+ "last_req" : {},
86+ "elapsed_ms" : 0 ,
87+ }
88+
7489
7590def _detect_next_page_token (data : Any ) -> Optional [str ]:
7691 """Return next_page_token if the response has more pages, else None."""
@@ -932,6 +947,7 @@ def _custom_section():
932947 dcc .Store (id = "sso-pending" , data = None ), # {"host": "..."} while browser OAuth is running
933948 dcc .Interval (id = "sso-poller" , interval = 1000 , disabled = True , n_intervals = 0 ),
934949 dcc .Interval (id = "page-ticker" , interval = 500 , disabled = True , n_intervals = 0 ),
950+ dcc .Interval (id = "load-all-ticker" , interval = 600 , disabled = True , n_intervals = 0 ),
935951
936952 dcc .Store (id = "dropdown-open" , data = False ), # tracks dropdown visibility
937953 dcc .Store (id = "response-cache" , data = {}), # {endpoint_id: {result, chips}} — cached API responses
@@ -1746,34 +1762,11 @@ def abort_pagination(n_clicks, page_state):
17461762 return {** state , "running" : False , "error" : "Cancelled" }
17471763
17481764
1749- # 11h. "Load All" — fetch all offset-paginated pages in one synchronous callback
1750- @app .callback (
1751- Output ("response-container" , "children" , allow_duplicate = True ),
1752- Output ("response-cache" , "data" , allow_duplicate = True ),
1753- Output ("chips-store" , "data" , allow_duplicate = True ),
1754- Output ("fetch-status-bar" , "children" , allow_duplicate = True ),
1755- Output ("sp-load-all-btn" , "style" , allow_duplicate = True ),
1756- Input ("sp-load-all-btn" , "n_clicks" ),
1757- State ("last-request" , "data" ),
1758- State ("conn-config" , "data" ),
1759- State ("response-cache" , "data" ),
1760- prevent_initial_call = True ,
1761- )
1762- def load_all_pages (n_clicks , last_req , conn_config , cache ):
1763- NO = (no_update , no_update , no_update , no_update , no_update )
1764- if not n_clicks or not last_req :
1765- return NO
1766- initial_data = last_req .get ("initial_data" , {})
1767- if not isinstance (initial_data , dict ) or not initial_data .get ("has_more" ):
1768- return NO
1769- list_key = _find_list_key (initial_data )
1770- if not list_key :
1771- return NO
1772-
1773- host , token = _resolve_conn (conn_config )
1774- if not token or not host :
1775- return NO
1765+ # 11h. "Load All" — background thread fetches pages; ticker polls progress.
17761766
1767+ def _load_all_worker (last_req , host , token , list_key , initial_data ):
1768+ """Background thread that fetches all pages and updates _load_all_state."""
1769+ state = _load_all_state
17771770 items = list (initial_data .get (list_key , []))
17781771 limit = len (items ) or 25
17791772 offset = len (items )
@@ -1782,7 +1775,12 @@ def load_all_pages(n_clicks, last_req, conn_config, cache):
17821775 total_elapsed = last_req .get ("elapsed_ms" , 0 )
17831776 pages = 1
17841777
1785- while pages < 200 :
1778+ state ["items" ] = items
1779+ state ["pages" ] = pages
1780+ state ["total_items" ] = len (items )
1781+ state ["elapsed_ms" ] = total_elapsed
1782+
1783+ while pages < 200 and state ["running" ]:
17861784 qp = dict (last_req .get ("query_params" ) or {})
17871785 if use_token and next_token :
17881786 qp ["page_token" ] = next_token
@@ -1791,10 +1789,12 @@ def load_all_pages(n_clicks, last_req, conn_config, cache):
17911789 qp ["limit" ] = str (limit )
17921790
17931791 t0 = time .perf_counter ()
1794- r = make_api_call (last_req ["method" ], last_req ["path" ], token , host , query_params = qp , body = last_req .get ("body" ))
1792+ r = make_api_call (last_req ["method" ], last_req ["path" ], token , host ,
1793+ query_params = qp , body = last_req .get ("body" ))
17951794 total_elapsed += int ((time .perf_counter () - t0 ) * 1000 )
17961795
17971796 if not r ["success" ]:
1797+ state ["error" ] = r .get ("error" , "API error" )
17981798 break
17991799
18001800 page_data = r ["data" ]
@@ -1804,14 +1804,113 @@ def load_all_pages(n_clicks, last_req, conn_config, cache):
18041804 offset += limit
18051805 next_token = _detect_next_page_token (page_data )
18061806
1807+ state ["items" ] = items
1808+ state ["pages" ] = pages
1809+ state ["total_items" ] = len (items )
1810+ state ["elapsed_ms" ] = total_elapsed
1811+
18071812 if not page_data .get ("has_more" ):
18081813 break
18091814
1815+ state ["running" ] = False
1816+ state ["done" ] = True
1817+
1818+
# 11h-start: Click "Load All" → start background thread + enable ticker
@app.callback(
    Output("load-all-ticker", "disabled"),
    Output("fetch-status-bar", "children", allow_duplicate=True),
    Output("sp-load-all-btn", "style", allow_duplicate=True),
    Input("sp-load-all-btn", "n_clicks"),
    State("last-request", "data"),
    State("conn-config", "data"),
    prevent_initial_call=True,
)
def start_load_all(n_clicks, last_req, conn_config):
    """Kick off a background "Load All" fetch for the last paginated request.

    Validates that the last response reports more pages (``has_more``), that a
    list key can be located in it, and that connection credentials resolve.
    Then resets the shared ``_load_all_state``, launches ``_load_all_worker``
    as a daemon thread, and enables the ticker so the polling callback can
    report progress.

    Returns a 3-tuple for the outputs:
    (load-all-ticker.disabled, fetch-status-bar.children, sp-load-all-btn.style).
    """
    NO = (True, no_update, no_update)
    if not n_clicks or not last_req:
        return NO

    # Re-entrancy guard: ignore clicks while a previous load is still in
    # flight — starting a second worker would race on _load_all_state, and
    # returning NO here would wrongly disable the ticker mid-run.
    if _load_all_state.get("running"):
        return no_update, no_update, no_update

    initial_data = last_req.get("initial_data", {})
    if not isinstance(initial_data, dict) or not initial_data.get("has_more"):
        return NO
    list_key = _find_list_key(initial_data)
    if not list_key:
        return NO

    host, token = _resolve_conn(conn_config)
    if not token or not host:
        return NO

    # Reset shared state before the worker starts touching it.
    _load_all_state.update({
        "running": True, "done": False, "error": None,
        "pages": 1, "total_items": 0, "items": [],
        "list_key": list_key, "initial_data": initial_data,
        "last_req": last_req, "elapsed_ms": 0,
    })

    # Daemon thread: must not block process shutdown if the server exits.
    t = threading.Thread(
        target=_load_all_worker,
        args=(last_req, host, token, list_key, initial_data),
        daemon=True,
    )
    t.start()

    status = html.Div([
        html.I(className="bi bi-arrow-repeat me-2 spin-icon"),
        "Loading page 1…",
    ], className="fetch-status-inner loading")

    # Enable the ticker, show the spinner, hide the "Load All" button.
    return False, status, {"display": "none"}
1862+
1863+
1864+ # 11h-tick: Poll progress from background thread
1865+ @app .callback (
1866+ Output ("response-container" , "children" , allow_duplicate = True ),
1867+ Output ("response-cache" , "data" , allow_duplicate = True ),
1868+ Output ("chips-store" , "data" , allow_duplicate = True ),
1869+ Output ("fetch-status-bar" , "children" , allow_duplicate = True ),
1870+ Output ("load-all-ticker" , "disabled" , allow_duplicate = True ),
1871+ Input ("load-all-ticker" , "n_intervals" ),
1872+ State ("response-cache" , "data" ),
1873+ prevent_initial_call = True ,
1874+ )
1875+ def poll_load_all (n_intervals , cache ):
1876+ NO = (no_update , no_update , no_update , no_update , no_update )
1877+ state = _load_all_state
1878+ pages = state .get ("pages" , 0 )
1879+ total = state .get ("total_items" , 0 )
1880+ elapsed = state .get ("elapsed_ms" , 0 )
1881+
1882+ if state .get ("running" ):
1883+ # Still loading — update status bar only
1884+ status = html .Div ([
1885+ html .I (className = "bi bi-arrow-repeat me-2 spin-icon" ),
1886+ f"Loading page { pages + 1 } … ({ total :,} items so far · { elapsed :,} ms)" ,
1887+ ], className = "fetch-status-inner loading" )
1888+ return no_update , no_update , no_update , status , False
1889+
1890+ # Done or error — render final result
1891+ if state .get ("error" ):
1892+ status = html .Div ([
1893+ html .I (className = "bi bi-exclamation-triangle-fill me-2" ),
1894+ f"Error after { pages } pages ({ total :,} items): { state ['error' ]} " ,
1895+ ], className = "fetch-status-inner error" )
1896+ # Still render whatever we got
1897+ else :
1898+ status = html .Div ([
1899+ html .I (className = "bi bi-check-circle-fill me-2" ),
1900+ f"All pages loaded — { total :,} items · { pages } pages · { elapsed :,} ms" ,
1901+ ], className = "fetch-status-inner done" )
1902+
1903+ items = state .get ("items" , [])
1904+ list_key = state .get ("list_key" )
1905+ initial_data = state .get ("initial_data" , {})
1906+ last_req = state .get ("last_req" , {})
1907+
18101908 merged_data = {** initial_data , list_key : items }
18111909 merged_data .pop ("has_more" , None )
1910+ merged_data .pop ("next_page_token" , None )
18121911 merged_result = {
18131912 "status_code" : last_req .get ("status_code" , 200 ),
1814- "elapsed_ms" : total_elapsed ,
1913+ "elapsed_ms" : elapsed ,
18151914 "data" : merged_data ,
18161915 "success" : True , "error" : None ,
18171916 "url" : last_req .get ("url" , "" ),
@@ -1822,12 +1921,10 @@ def load_all_pages(n_clicks, last_req, conn_config, cache):
18221921 if ep_id :
18231922 new_cache [ep_id ] = {"result" : merged_result , "chips" : chips or None }
18241923
1825- status = html .Div ([
1826- html .I (className = "bi bi-check-circle-fill me-2" ),
1827- f"All pages loaded — { len (items ):,} items · { pages } pages · { total_elapsed } ms" ,
1828- ], className = "fetch-status-inner done" )
1924+ # Reset state for next use
1925+ _load_all_state .update ({"running" : False , "done" : False , "items" : []})
18291926
1830- return build_response_panel (merged_result , chips ), new_cache , chips or None , status , { "display" : "none" }
1927+ return build_response_panel (merged_result , chips ), new_cache , chips or None , status , True
18311928
18321929
18331930# 13. Search filter
0 commit comments