performance upgrades

main
Gabriel 2 years ago
parent b294032487
commit 3ac454eda2

@ -1,5 +1,9 @@
/**
 * GET a URL, decode the response body as JSON, and hand the result to `cback`.
 * Network or JSON-parse failures surface as unhandled promise rejections,
 * exactly as in the original chain (no .catch is attached).
 * @param {string} url - address to fetch
 * @param {(data: any) => void} cback - receives the decoded JSON payload
 */
function fetchback(url, cback) {
    const decodeJson = (response) => response.json();
    fetch(url).then(decodeJson).then(cback);
}
function interverse_data(url,cback){
url = interverse_proxy+"?url="+url
url = interverse_proxy+"/initial?url="+url
fetch(url).then((response)=> {
if (!response.ok){
return {}

@ -15,6 +15,10 @@
/* Uniform spacing for every direct child of the details pane. */
#interverse-details>*{
margin:1rem;
}
/* Cap preview images so they never dominate the viewport. */
#interverse-details img{
max-width:20vw;
max-height:50vh;
}
#interverse-resource-groups{
display: flex;
flex-direction: row;
@ -180,7 +184,7 @@ a{
<template x-for="connection in Alpine.store('data')['connections']">
<div class="interverse-connection">
<template x-if="Alpine.store(connection)['name']">
<div x-on:click="initialize(connection)">
<div x-on:click="initialize(connection)" class='interverse-connection-preview'>
<h3 x-text="Alpine.store(connection)['name']"></h3>
<template x-if="Alpine.store(connection)['image']!=''">
<img x-bind:src="Alpine.store(connection)['image']">
@ -217,15 +221,23 @@ a{
initialize(main_url);
});
/*
Alpine.store('data',data['main'])
for (c in data['connections']){
Alpine.store(c,data['connections'][c])
}
*/
/**
 * Bootstrap the Alpine stores for a given site URL.
 * Fetches the site's own interverse data first (fast initial render),
 * then fetches the complete connection map and stores each connection
 * under its own named Alpine store.
 * @param {string} url - site URL; scheme and trailing slash are stripped.
 */
function initialize(url) {
    // The proxy expects a bare hostname: strip scheme and first slash once,
    // instead of repeating the replace chain at every call site.
    const host = url.replace("https://", '').replace('http://', '').replace('/', '');
    Alpine.store("data", {});
    interverse_data(host, function (data) {
        console.log("Initializing interverse...")
        Alpine.store('data', data);
    });
    fetchback(interverse_proxy + '/complete?url=' + host, function (data) {
        // Each connection becomes its own named store, keyed by its URL.
        for (const c in data['connections']) {
            Alpine.store(c, data['connections'][c])
        }
    });
}
</script>

@ -4,15 +4,27 @@ import simple_cache
# Flask application and the shared crawl cache (module-level singletons).
# The duplicate Flask(...) construction left over from the diff is removed:
# the app must be created exactly once.
app = Flask('interverse-proxy')
cache = simple_cache.Cache()
print("Cache initialized.")
@app.route('/')
def index():
    """Root has no API function; send visitors to the project page (307)."""
    return redirect("https://codeberg.org/gabe/Interverse", 307)
@app.route("/initial", methods=['GET'])
def initial():
    """Return cached interverse data for the requested site itself, as JSON.

    Query param `url`: bare hostname of the target site. Missing param
    redirects (307) to the project page instead of erroring.
    """
    url = request.args.get('url')
    if url is None:  # `is None`, not `== None` (PEP 8)
        return redirect("https://codeberg.org/gabe/Interverse", 307)
    data = cache.load_data(url)
    return json.dumps(data)
@app.route("/complete", methods=['GET'])
def complete():
    """Return the full interverse dataset (origin + all its connections) as JSON.

    Query param `url`: bare hostname of the target site. Missing param
    redirects (307) to the project page. Stale lines from the superseded
    "/" route are removed.
    """
    url = request.args.get('url')
    if url is None:
        return redirect("https://codeberg.org/gabe/Interverse", 307)
    data = cache.get_interverse_data(url)
    return json.dumps(data)

@ -1,28 +1,42 @@
import requests,time,json
ideal_delta = 60*5 #5 minutes
import requests
import time,json,os
from joblib import Parallel, delayed
crawler_header = {'User-agent': 'interverse-crawler','info':'https://libresolutions.network/videos/interverse-demo-1/'}
schemes = ['http://','https://']
ideal_delta = 60 * 15 # 15 minutes
request_timeout = 0.
cache_origin = 'https://libresolutions.network'
crawler_header = {'User-agent': 'interverse-crawler',
'info': 'https://libresolutions.network/videos/interverse-demo-1/'}
schemes = ['https://']
locations = [
'/.well-known/discover.json',
'/.well-known/interverse',
'/interverse.json',
'/discover.json'
]
'/interverse.json'
]
def dictify(lst):
    """Index crawl results by their 'location' key, dropping failed lookups.

    :param lst: iterable of result dicts (each carrying a 'location' key)
        or None entries for sites that could not be fetched
    :return: dict mapping location -> result dict (later duplicates win,
        matching the original loop's overwrite order)
    """
    return {item['location']: item for item in lst if item is not None}
class Cache:
def __init__(self,delta=None):
if delta==None:
def __init__(self, delta=None):
if delta == None:
self.delta = ideal_delta
else:
self.delta = delta
self.links={}
self.links = {}
self.build_cache()
# link = key:{data,time}
def load_data(self,url):
def load_data(self, url):
print(f"Loading interverse data for :{url}")
data = None
t = time.time()
if url in self.links:
@ -32,53 +46,74 @@ class Cache:
for s in schemes:
for l in locations:
try:
data = requests.get(s+url+l,headers=crawler_header,timeout=3).json()
data = requests.get(
s+url.replace(
'https://', '').replace('http://', '').replace("/", '')+l, headers=crawler_header, timeout=1).json()
if l.find('discover'):
#translate discover to interverse
data = json.loads(json.dumps(data).replace("preview_connections","connection_groups"))
# translate discover to interverse
data = json.loads(json.dumps(data).replace(
"preview_connections", "connection_groups"))
print(f"Interverse connection found at {l}")
t = time.time()
t = time.time()
self.links[url] = {
'time':t,
'data':data,
'time': t,
'data': data,
}
return data
except:
pass
if data != None:
t = time.time()
t = time.time()
self.links[url] = {
'time':t,
'data':data,
'time': t+ideal_delta,
'data': data,
}
if data == None:
#If no data is returned, wait longer before attempting again
# If no data is returned, wait longer before attempting again
self.links[url] = {
'data':None,
'time':t+ideal_delta
'data': None,
'time': t+60*60 #1 hour
}
return data
def get_interverse_data(self, url):
    """Fetch interverse data for `url` plus every site it links to.

    Connections are gathered from the flat 'connections' list and from
    every list inside the 'connection_groups' mapping, then crawled in
    parallel via joblib. Bare `except: pass` is replaced with explicit
    guards so unrelated errors are no longer swallowed. Stale lines from
    the superseded (sequential) implementation are removed.

    :param url: bare hostname of the origin site
    :return: {'main': origin data (or None), 'connections': {location: data}}
    """
    origin = self.load_data(url)
    connections = []
    if origin is not None:
        connections.extend(origin.get('connections', []))
        for group in origin.get('connection_groups', {}).values():
            connections.extend(group)
    # Crawl every connection in parallel; failed lookups come back as
    # None and are dropped by dictify().
    results = Parallel()(delayed(self.load_data)(i) for i in connections)
    return {
        'main': origin,
        'connections': dictify(results),
    }
def build_cache(self):
    """Warm the cache by crawling the origin site and each of its connections.

    Results land in self.links as a side effect of load_data /
    get_interverse_data; nothing is returned. Bare `except: pass` is
    replaced with explicit guards, and the unused `c` binding is dropped.
    """
    print("Building cache..\nThis may take some time")
    origin = self.load_data(cache_origin)
    connections = []
    if origin is not None:
        # 'connections' is a flat list; 'connection_groups' maps group -> list.
        connections.extend(origin.get('connections', []))
        for group in origin.get('connection_groups', {}).values():
            connections.extend(group)
    # Fetch each connection's full dataset in parallel purely to
    # populate the cache; the results themselves are discarded.
    Parallel()(delayed(self.get_interverse_data)(i) for i in connections)
if __name__ == '__main__':
    # Build the cache eagerly when run as a script (Cache.__init__ calls
    # build_cache). The duplicated guard left over from the diff is removed.
    cache = Cache()

@ -0,0 +1,2 @@
#! /bin/bash
# Serve the proxy under uWSGI on all interfaces, port 5000.
# `-w interverse-proxy:app` loads the `app` callable from the
# interverse-proxy module in the working directory.
uwsgi --http 0.0.0.0:5000 -w interverse-proxy:app
Loading…
Cancel
Save