# Python Standard Library imports
import asyncio
import http.client
import json
import sys
import urllib
import urllib.error
import urllib.request

# External library imports
from fastapi import FastAPI, status

from oshminer.supported_platforms import supported_domains
from oshminer.errors import exceptions
1921
# Default Wikifactory GraphQL API endpoint, used whenever the client
# does not supply a custom URL in the request body.
WIF_API_DEFAULT: str = "https://wikifactory.com/api/graphql"
class MiningRequest(BaseModel):
    """Request body for the data-mining endpoint.

    Attributes:
        repo_urls: Repository URLs to mine data from.
        requested_data: Names of the data items to mine; validated
            elsewhere against the supported request types.
        wikifactory_API_URL: Optional override for the Wikifactory
            GraphQL endpoint; defaults to the public instance.
    """

    # NOTE(review): originals defaulted to `set()`, mismatching the declared
    # `list` type; empty lists keep identical (empty) default semantics.
    # Pydantic deep-copies defaults per instance, so mutable defaults are safe.
    repo_urls: list[HttpUrl] = []
    requested_data: list[str] = []
    wikifactory_API_URL: str = WIF_API_DEFAULT
2328
2429# Supported data-mining request types. Items in `required_data` must
2530# be from this list.
@@ -44,23 +49,58 @@ class MiningRequest(BaseModel):
async def root():
    """Health-check endpoint: confirm the mining backend is up."""
    message = "Dashboard data-mining backend is on"
    return {"message": message}
4651
async def process_repo(
    repo: HttpUrl,
    requests: list[str],
    responses: list,
    WIF_API: str = WIF_API_DEFAULT,
):
    """Mine a single repository and append the result to `responses`.

    Args:
        repo: Repository URL; its hostname (minus "www.") selects the
            platform handler from `supported_domains`.
        requests: Data items to mine for this repository.
        responses: Shared list that a successful result is appended to.
        WIF_API: Wikifactory GraphQL endpoint. It is forwarded to the
            platform handler only when it differs from the default, so
            handlers that take no URL argument keep working.

    Returns:
        None on success; a 400 JSONResponse when the handler raises
        BadRepoError. NOTE(review): the caller gathers these coroutines
        and discards return values, so the error response is currently
        never sent to the client — confirm intended.
    """
    platform: str = repo.host.replace("www.", "")
    # The two original branches were identical except for the extra URL
    # argument; build that argument once instead of duplicating the call.
    extra_args = (WIF_API,) if WIF_API != WIF_API_DEFAULT else ()
    try:
        repo_info: dict = await supported_domains[platform](repo, requests, *extra_args)
    except exceptions.BadRepoError:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content=f"Error with repository: {repo}",
        )
    responses.append(repo_info)
5773
58- @app .get (
74+ @app .post (
5975 "/data/" ,
6076 name = "API endpoint" ,
6177 description = "Primary endpoint for requesting data."
6278 )
6379async def mining_request (request_body : MiningRequest ):
80+ #
81+ # Check if custom Wikifactory API URL is provided and test it
82+ #
83+
84+ if request_body .wikifactory_API_URL != WIF_API_DEFAULT :
85+ print (
86+ f"Custom Wikifactory API URL detected: { request_body .wikifactory_API_URL } " ,
87+ file = sys .stderr
88+ )
89+ try :
90+ api_url_response = urllib .request .urlopen (
91+ request_body .wikifactory_API_URL ,
92+ timeout = 10
93+ )
94+ if api_url_response .status != 200 :
95+ raise exceptions .BadWIFAPIError
96+ except (exceptions .BadWIFAPIError , urllib .error .URLError , http .client .BadStatusLine ) as err :
97+ return JSONResponse (
98+ status_code = status .HTTP_400_BAD_REQUEST ,
99+ content = f"Error reaching Wikifactory API URL: { request_body .wikifactory_API_URL } { err } "
100+ )
101+ elif request_body .wikifactory_API_URL is None :
102+ request_body .wikifactory_API_URL = WIF_API_DEFAULT
103+
64104 #
65105 # Check API client's request body
66106 #
@@ -103,7 +143,16 @@ async def mining_request(request_body: MiningRequest):
103143 # Construct, send API requests, and get results
104144 #
105145
106- await asyncio .gather (* [process_repo (repo , request_body .requested_data , response_list ) for repo in request_body .repo_urls ])
146+ await asyncio .gather (
147+ * [
148+ process_repo (
149+ repo ,
150+ request_body .requested_data ,
151+ response_list ,
152+ WIF_API = request_body .wikifactory_API_URL
153+ ) for repo in request_body .repo_urls
154+ ]
155+ )
107156
108157 # for repo in request_body.repo_urls:
109158 # platform = repo.host.replace("www.", "")