
Commit 677b740

Merge pull request #3 from dhruvan2006/fix/non-static-url

Fixed the hard-coding of the leverage URL used to get the contract address.

2 parents: 350791f + feeda70

File tree: 3 files changed (+59 -21 lines)

pytoros/token.py

Lines changed: 18 additions & 9 deletions
@@ -1,5 +1,7 @@
 import requests
 import pandas as pd
+from bs4 import BeautifulSoup
+import json
 
 class Token:
     """
@@ -11,7 +13,7 @@ class Token:
         "ARB": 42161,  # Arbitrum
         "BASE": 8453,  # Base
     }
-    TOROS_URL = "https://toros.finance/_next/data/mw471zlJ9uL1Ee-_If1FI/category/leverage.json?category=leverage"
+    TOROS_URL = "https://toros.finance/"
     GRAPHQL_URL = "https://api-v2.dhedge.org/graphql"
     SCALE_FACTOR = 10**18
 
@@ -36,14 +38,21 @@ def _get_chain_id(self) -> int:
     def _get_token_address(self) -> str:
         response = requests.get(self.TOROS_URL, timeout=10)
         response.raise_for_status()
-        data = response.json()
-
-        chain_id = self._get_chain_id()
-        products = data.get('pageProps', {}).get('products', [])
-
-        for product in products:
-            if product.get('chainId') == chain_id and product.get('symbol') == self.symbol:
-                return product.get('address')
+        html_content = response.text
+
+        soup = BeautifulSoup(html_content, "html.parser")
+        script_tag = soup.find("script", id="__NEXT_DATA__")
+
+        if script_tag:
+            try:
+                json_data = json.loads(script_tag.string)
+                chain_id = self._get_chain_id()
+                leverage = json_data["props"]["pageProps"]["categoryMap"]["Leverage"]
+                for token in leverage:
+                    if token["chainId"] == chain_id and token["symbol"] == self.symbol:
+                        return token["address"]
+            except json.JSONDecodeError as e:
+                print(f"Error decoding JSON: {e}")
 
         raise ValueError(f"Token with symbol '{self.symbol}' and chain '{self.chain_name}' not found.")
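
The old TOROS_URL embedded a Next.js build ID (mw471zlJ9uL1Ee-_If1FI) in its /_next/data/ path; that ID changes whenever the site is redeployed, which is why the hard-coded URL kept breaking. The replacement fetches the plain landing page and reads the JSON that Next.js embeds in the __NEXT_DATA__ script tag. Below is a minimal standalone sketch of the same lookup outside the Token class: the function name is hypothetical, and the props -> pageProps -> categoryMap -> Leverage path is taken from the diff above, so it is illustrative rather than part of the library.

import json

import requests
from bs4 import BeautifulSoup


def find_leverage_address(symbol: str, chain_id: int):
    """Illustrative helper (not part of pytoros): resolve a Toros leverage
    token's contract address from the __NEXT_DATA__ blob on the homepage."""
    response = requests.get("https://toros.finance/", timeout=10)
    response.raise_for_status()

    soup = BeautifulSoup(response.text, "html.parser")
    script_tag = soup.find("script", id="__NEXT_DATA__")
    if script_tag is None:
        return None

    data = json.loads(script_tag.string)
    # The JSON path below is assumed from the diff and could change if the
    # site's page structure changes.
    leverage = data["props"]["pageProps"]["categoryMap"]["Leverage"]
    for product in leverage:
        if product["chainId"] == chain_id and product["symbol"] == symbol:
            return product["address"]
    return None


# Example call using the values from the test fixture below:
# find_leverage_address("BTCBULL3X", 42161)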

requirements.txt

Lines changed: 2 additions & 0 deletions
@@ -1,3 +1,4 @@
+beautifulsoup4==4.12.3
 certifi==2024.8.30
 charset-normalizer==3.4.0
 idna==3.10
@@ -8,5 +9,6 @@ pytz==2024.2
 requests==2.32.3
 setuptools==75.6.0
 six==1.16.0
+soupsieve==2.6
 tzdata==2024.2
 urllib3==2.2.3

tests/test_token.py

Lines changed: 39 additions & 12 deletions
@@ -6,15 +6,28 @@
 class TestToken(unittest.TestCase):
     @patch('requests.get')
     def test_get_token_address_success(self, mock_get):
+        mock_html = '''
+        <html>
+            <body>
+                <script id="__NEXT_DATA__" type="application/json">
+                {
+                    "props": {
+                        "pageProps": {
+                            "categoryMap": {
+                                "Leverage": [
+                                    {"chainId": 42161, "symbol": "BTCBULL3X", "address": "0x1234567890abcdef"}
+                                ]
+                            }
+                        }
+                    }
+                }
+                </script>
+            </body>
+        </html>
+        '''
         mock_response = MagicMock()
         mock_response.status_code = 200
-        mock_response.json.return_value = {
-            'pageProps': {
-                'products': [
-                    {'chainId': 42161, 'symbol': 'BTCBULL3X', 'address': '0x1234567890abcdef'}
-                ]
-            }
-        }
+        mock_response.text = mock_html
         mock_get.return_value = mock_response
 
         token = Token("ARB:BTCBULL3X")
@@ -24,13 +37,27 @@ def test_get_token_address_success(self, mock_get):
 
     @patch('requests.get')
     def test_get_token_address_not_found(self, mock_get):
+        # Mock the HTML response with an empty "Leverage" array
+        mock_html = '''
+        <html>
+            <body>
+                <script id="__NEXT_DATA__" type="application/json">
+                {
+                    "props": {
+                        "pageProps": {
+                            "categoryMap": {
+                                "Leverage": []
+                            }
+                        }
+                    }
+                }
+                </script>
+            </body>
+        </html>
+        '''
         mock_response = MagicMock()
         mock_response.status_code = 200
-        mock_response.json.return_value = {
-            'pageProps': {
-                'products': []
-            }
-        }
+        mock_response.text = mock_html
         mock_get.return_value = mock_response
 
         token = Token("ARB:BTCBULL3X")
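
For a quick manual check of the new lookup against the live site (network access required), the Token class can be driven the same way the tests do. This is only a debugging sketch: it assumes the package imports as pytoros.token, and it calls the private _get_token_address helper directly.

from pytoros.token import Token

# "ARB:BTCBULL3X" follows the "CHAIN:SYMBOL" convention used in the tests above.
token = Token("ARB:BTCBULL3X")

# Private helper, called directly here only to inspect the HTML-based lookup;
# it raises ValueError if the symbol/chain pair is not listed under "Leverage".
print(token._get_token_address())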
