Przeglądaj źródła

support arbitrary cache expiry

raylu 1 tydzień temu
rodzic
commit
fdc7a2ed3c
4 zmienionych plików z 17 dodań i 10 usunięć
  1. 9 4
      cache.py
  2. 3 2
      company.py
  3. 3 2
      integration.py
  4. 2 2
      mat_competitors.py

+ 9 - 4
cache.py

@@ -1,3 +1,4 @@
+import datetime
 import lzma
 import lzma
 import pathlib
 import pathlib
 import time
 import time
@@ -10,16 +11,20 @@ import urllib.parse
 if typing.TYPE_CHECKING:
 if typing.TYPE_CHECKING:
 	import httpx._types
 	import httpx._types
 
 
+ONE_DAY = datetime.timedelta(days=1)
+
 CACHE_DIR = pathlib.Path(__file__).parent / 'cache'
 CACHE_DIR = pathlib.Path(__file__).parent / 'cache'
 client = httpx.Client(transport=httpx.HTTPTransport(http2=True, retries=2), timeout=10)
 client = httpx.Client(transport=httpx.HTTPTransport(http2=True, retries=2), timeout=10)
 
 
 @typing.overload
 @typing.overload
-def get(url: str, *, json: typing.Literal[True]=True, headers: httpx._types.HeaderTypes|None=None) -> typing.Any:
+def get(url: str, *, json: typing.Literal[True]=True, headers: httpx._types.HeaderTypes|None=None,
+		expiry: datetime.timedelta=datetime.timedelta(minutes=10)) -> typing.Any:
 	...
 	...
 @typing.overload
 @typing.overload
-def get(url: str, *, json: typing.Literal[False], headers: httpx._types.HeaderTypes|None=None) -> str:
+def get(url: str, *, json: typing.Literal[False], headers: httpx._types.HeaderTypes|None=None,
+		expiry: datetime.timedelta=datetime.timedelta(minutes=10)) -> str:
 	...
 	...
-def get(url: str, *, json=True, headers=None) -> typing.Any:
+def get(url: str, *, json=True, headers=None, expiry=datetime.timedelta(minutes=10)) -> typing.Any:
 	parsed = urllib.parse.urlparse(url)
 	parsed = urllib.parse.urlparse(url)
 	assert parsed.hostname is not None
 	assert parsed.hostname is not None
 	cache_filename = urllib.parse.quote(parsed.path.removeprefix('/'), safe='')
 	cache_filename = urllib.parse.quote(parsed.path.removeprefix('/'), safe='')
@@ -29,7 +34,7 @@ def get(url: str, *, json=True, headers=None) -> typing.Any:
 	cache_path = CACHE_DIR / parsed.hostname / cache_filename
 	cache_path = CACHE_DIR / parsed.hostname / cache_filename
 
 
 	try:
 	try:
-		if cache_path.stat().st_mtime > time.time() - 600: # less than 10 minutes old
+		if cache_path.stat().st_mtime > time.time() - expiry.total_seconds(): # newer than the expiry window
 			with lzma.open(cache_path, 'rb') as f:
 			with lzma.open(cache_path, 'rb') as f:
 				if json:
 				if json:
 					return cbor2.load(f)
 					return cbor2.load(f)

+ 3 - 2
company.py

@@ -19,7 +19,7 @@ def main() -> None:
 			if cogc is not None:
 			if cogc is not None:
 				cogc_planets[cogc].append(planet['PlanetName'])
 				cogc_planets[cogc].append(planet['PlanetName'])
 
 
-	buildings: typing.Sequence[Building] = cache.get('https://rest.fnar.net/building/allbuildings')
+	buildings: typing.Sequence[Building] = cache.get('https://rest.fnar.net/building/allbuildings', expiry=cache.ONE_DAY)
 	experts: dict[str, str] = {}
 	experts: dict[str, str] = {}
 	for building in buildings:
 	for building in buildings:
 		for recipe in building['Recipes']:
 		for recipe in building['Recipes']:
@@ -33,7 +33,8 @@ def main() -> None:
 		print(mat, expertise, ', '.join(cogc_planets.get(expertise, []))) # pyright: ignore[reportArgumentType, reportCallIssue]
 		print(mat, expertise, ', '.join(cogc_planets.get(expertise, []))) # pyright: ignore[reportArgumentType, reportCallIssue]
 
 
 def iter_planet_cogc() -> typing.Iterator[tuple[Planet, Expertise | None]]:
 def iter_planet_cogc() -> typing.Iterator[tuple[Planet, Expertise | None]]:
-	all_planets: typing.Collection[Planet] = cache.get('https://universemap.taiyibureau.de/planet_data.json')
+	all_planets: typing.Collection[Planet] = cache.get('https://universemap.taiyibureau.de/planet_data.json',
+			expiry=cache.ONE_DAY)
 	for planet in all_planets:
 	for planet in all_planets:
 		cogc = None
 		cogc = None
 		if len(cogcs := planet['COGCPrograms']) > 1:
 		if len(cogcs := planet['COGCPrograms']) > 1:

+ 3 - 2
integration.py

@@ -60,7 +60,8 @@ def main() -> None:
 
 
 def pmmg_monthly_report() -> dict[str, dict[str, CompanyOutput]]:
 def pmmg_monthly_report() -> dict[str, dict[str, CompanyOutput]]:
 	report_constants = cache.get(
 	report_constants = cache.get(
-			'https://raw.githubusercontent.com/PMMG-Products/pmmg-products.github.io/main/reports/src/staticData/constants.ts', json=False)
+			'https://raw.githubusercontent.com/PMMG-Products/pmmg-products.github.io/main/reports/src/staticData/constants.ts',
+			json=False, expiry=cache.ONE_DAY)
 	# export const months = ["mar25", "apr25", ..., "dec25", "jan26"];
 	# export const months = ["mar25", "apr25", ..., "dec25", "jan26"];
 	match = re.search(r'export const months = \[(.*?)\];', report_constants)
 	match = re.search(r'export const months = \[(.*?)\];', report_constants)
 	assert match
 	assert match
@@ -68,7 +69,7 @@ def pmmg_monthly_report() -> dict[str, dict[str, CompanyOutput]]:
 	months = [m.strip().strip('"') for m in months_str.split(',')]
 	months = [m.strip().strip('"') for m in months_str.split(',')]
 	last_month = months[-1]
 	last_month = months[-1]
 	print('getting report for', last_month)
 	print('getting report for', last_month)
-	return cache.get(f'https://pmmg-products.github.io/reports/data/company-data-{last_month}.json')['individual']
+	return cache.get(f'https://pmmg-products.github.io/reports/data/company-data-{last_month}.json', expiry=cache.ONE_DAY)['individual']
 
 
 class CompanyOutput(typing.TypedDict):
 class CompanyOutput(typing.TypedDict):
 	amount: int
 	amount: int

+ 2 - 2
mat_competitors.py

@@ -36,7 +36,7 @@ def main() -> None:
 			coid_bases[base['OwnerId']].append(planet)
 			coid_bases[base['OwnerId']].append(planet)
 
 
 	coid_users: dict[str, str] = {company_id: d['Username']
 	coid_users: dict[str, str] = {company_id: d['Username']
-		for company_id, d in cache.get('https://pmmg-products.github.io/reports/data/knownCompanies.json').items()}
+		for company_id, d in cache.get('https://pmmg-products.github.io/reports/data/knownCompanies.json', expiry=cache.ONE_DAY).items()}
 
 
 	for company_id, co_production in integration.pmmg_monthly_report().items():
 	for company_id, co_production in integration.pmmg_monthly_report().items():
 		if (mat_production := co_production.get(ticker)) is None:
 		if (mat_production := co_production.get(ticker)) is None:
@@ -48,7 +48,7 @@ def main() -> None:
 			print(f'{mat_production["amount"]:10.1f}', ', '.join(bases))
 			print(f'{mat_production["amount"]:10.1f}', ', '.join(bases))
 
 
 def iter_expertise(ticker: str) -> typing.Iterator[str]:
 def iter_expertise(ticker: str) -> typing.Iterator[str]:
-	buildings: typing.Sequence[company.Building] = cache.get('https://rest.fnar.net/building/allbuildings')
+	buildings: typing.Sequence[company.Building] = cache.get('https://rest.fnar.net/building/allbuildings', expiry=cache.ONE_DAY)
 	for building in buildings:
 	for building in buildings:
 		for recipe in building['Recipes']:
 		for recipe in building['Recipes']:
 			for output in recipe['Outputs']:
 			for output in recipe['Outputs']: