commit 70097239ec
14
___quick.py
Normal file
@@ -0,0 +1,14 @@
import json
from collections import OrderedDict

# Load the item catalogue, sort its keys, and write it back in order.
with open("items.json", "r") as file:
    this = json.load(file)

keys = sorted(this.keys())

newthis = OrderedDict()
for i in keys:
    newthis[i] = this[i]

with open("items.json", "w") as file:
    json.dump(newthis, file)
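For reference, json.dump can sort keys itself, so the OrderedDict pass above could be collapsed; a minimal equivalent sketch:

import json

with open("items.json", "r") as file:
    this = json.load(file)

with open("items.json", "w") as file:
    json.dump(this, file, sort_keys=True)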
6520
allitems.csv
Normal file
File diff suppressed because it is too large
98
async.py
Normal file
@@ -0,0 +1,98 @@
import aiohttp
import asyncio
import json


with open("items.json", "r") as jfile:
    items = json.load(jfile)

# Map the API's numeric quality codes to readable suffixes.
qualities = {
    1: " normal",
    2: " good",
    3: " outstanding",
    4: " excellent",
    5: " masterpiece"
}

def gen_urls():
    its = sorted(items.keys())
    urls = []
    # Batch the item IDs 200 at a time into one request URL per chunk.
    # Note the "@2" is appended after the whole batch, so it only
    # modifies the final item ID in each URL.
    for i in range(0, 5850, 200):
        curritems = ",".join(its[i:i+200])
        url = f"https://www.albion-online-data.com/api/v2/stats/prices/{curritems}@2?locations=3005,3003"
        urls.append(url)
    return urls
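# Note: capping each request at 200 item IDs keeps the URL short enough
# for the server; synch.py below shows the nginx "414 Request-URI Too
# Large" page that comes back when a batch overshoots that limit.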


async def fetch(session, url):
    """Execute an HTTP call asynchronously.

    Args:
        session: context for making the HTTP call
        url: URL to call
    Return:
        resp: a dict-like object containing the HTTP response
    """
    async with session.get(url) as response:
        resp = await response.json()
        return resp


async def fetch_all(urls):
    """Gather many HTTP calls made asynchronously.

    Args:
        urls: a list of URL strings
    Return:
        responses: a list of dict-like objects containing HTTP responses
    """
    async with aiohttp.ClientSession() as session:
        tasks = []
        for url in urls:
            tasks.append(
                fetch(
                    session,
                    url,
                )
            )
        # return_exceptions=True keeps one failed request from taking
        # down the whole batch; failures come back as exception objects.
        responses = await asyncio.gather(*tasks, return_exceptions=True)
        return responses


def run(urls):
    responses = asyncio.run(fetch_all(urls))
    return responses
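# asyncio.run() (available since Python 3.7) creates an event loop,
# runs the coroutine to completion, and closes the loop, so run() is a
# plain synchronous entry point into the async code above.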


def parser(itemdata):
    caer = {}
    black = {}
    for thing in itemdata:
        # fetch() already returns parsed JSON, so no json.loads() here;
        # skip entries that gather() returned as exceptions.
        if isinstance(thing, Exception):
            continue
        for item in thing:
            name = items[item["item_id"]] + qualities[item["quality"]]
            if item["city"] == "Black Market":
                black[name] = item["buy_price_max"]
            else:
                caer[name] = item["sell_price_min"]
    profits = []
    for k in black.keys():
        sell = black[k]
        buy = caer.get(k)
        # Skip items with no matching Caerleon listing or a zero price.
        if not buy:
            continue
        profit = sell * 0.94 - buy
        if profit > 0:
            row = {
                "black": sell,
                "caer": buy,
                "profit": profit,
                "name": k
            }
            profits.append(row)
    return profits


links = gen_urls()
responses = run(links)
print(parser(responses))

# Drop exception entries so the dump stays JSON-serializable.
with open("res.json", "w") as jfile:
    json.dump([r for r in responses if not isinstance(r, Exception)], jfile)
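To sanity-check parser() without touching the API, it can be fed a hand-made response batch; the two records below are invented for illustration, carry only the fields the code above reads, and assume "T4_BAG" is a key in items.json:

sample = [[
    {"item_id": "T4_BAG", "quality": 1, "city": "Caerleon",
     "sell_price_min": 1000, "buy_price_max": 0},
    {"item_id": "T4_BAG", "quality": 1, "city": "Black Market",
     "sell_price_min": 0, "buy_price_max": 1500},
]]
# 1500 * 0.94 - 1000 = 410.0 profit for the normal-quality item
print(parser(sample))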
5851
items.json
Normal file
File diff suppressed because it is too large
6982
itemstring.json
Normal file
File diff suppressed because it is too large
73
synch.py
Normal file
@@ -0,0 +1,73 @@
import json
import requests
from datetime import datetime
from rich.console import Console
from rich.table import Column, Table


with open("items.json", "r") as itfile:
    itemnames = json.load(itfile)

qualities = {
    1: " normal",
    2: " good",
    3: " outstanding",
    4: " excellent",
    5: " masterpiece"
}

def fetch_data():
    step = 200
    responses = []
    for i in range(0, 5850, step):
        print(i)
        items = ",".join(list(itemnames.keys())[i:i+step])
        url = "https://www.albion-online-data.com/api/v2/stats/prices/{}?locations=3005,3003".format(items)
        content = requests.get(url).content
        # The exact nginx error page returned when a batched URL is too long.
        toolong = b'<html>\r\n<head><title>414 Request-URI Too Large</title></head>\r\n<body>\r\n<center><h1>414 Request-URI Too Large</h1></center>\r\n<hr><center>nginx/1.16.1</center>\r\n</body>\r\n</html>\r\n'
        if content == toolong:
            print("TOOOOO LONG")
        else:
            responses.append(content)
    black = {}
    caer = {}
    for j in responses:
        j = json.loads(j)
        for item in j:
            name = itemnames[item["item_id"]] + qualities[item["quality"]]
            if item["city"] == "Black Market":
                black[name] = item["buy_price_max"]
            else:
                caer[name] = item["sell_price_min"]

    profits = []
    for k in black.keys():
        # Skip items missing a Caerleon listing or carrying a zero price.
        if not black[k] or not caer.get(k):
            continue
        if black[k] < caer[k]:
            continue
        # The 0.94 factor assumes a 6% market tax on the sale.
        profit = black[k] * 0.94 - caer[k]
        profits.append((profit, black[k] * 0.94, caer[k], k))
    # Tuples sort on their first element, so this orders by profit, ascending.
    profits = sorted(profits)
    return profits


def print_data(items):
    # items: list of (profit, black market price after tax, caer price, item name)
    console = Console()
    table = Table(show_header=True, header_style="bold green")
    table.add_column("Item")
    table.add_column("Profit (tax removed)")
    table.add_column("Caer price")
    table.add_column("Black market price")
    for i in items:
        # Hide anything below 500 silver of profit.
        if i[0] < 500:
            continue
        row = [i[3], str(i[0]/1000) + "k", str(i[2]/1000) + "k", str(i[1]/1000) + "k"]
        table.add_row(*row)
    console.print(table)


print_data(fetch_data())
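Since fetch_data() returns its tuples sorted ascending, the biggest margins print at the bottom of the table; a small hypothetical wrapper (not part of this commit) flips that and caps the output:

def print_top(items, n=20):
    # Reverse the ascending profit sort and keep only the n best rows.
    print_data(sorted(items, reverse=True)[:n])

print_top(fetch_data())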