performance upgrades
This commit is contained in:
parent b294032487 · commit 3ac454eda2
5 changed files with 116 additions and 51 deletions
@@ -1,5 +1,9 @@
 function fetchback(url,cback){
     fetch(url).then((request)=>request.json()).then((data)=>cback(data));
 }

 function interverse_data(url,cback){
-    url = interverse_proxy+"?url="+url
+    url = interverse_proxy+"/initial?url="+url
     fetch(url).then((response)=> {
         if (!response.ok){
             return {}
@@ -15,6 +15,10 @@
 #interverse-details>*{
     margin:1rem;
 }
+#interverse-details img{
+    max-width:20vw;
+    max-height:50vh;
+}
 #interverse-resource-groups{
     display: flex;
     flex-direction: row;
@@ -180,7 +184,7 @@ a{
 <template x-for="connection in Alpine.store('data')['connections']">
     <div class="interverse-connection">
         <template x-if="Alpine.store(connection)['name']">
-            <div x-on:click="initialize(connection)">
+            <div x-on:click="initialize(connection)" class='interverse-connection-preview'>
                 <h3 x-text="Alpine.store(connection)['name']"></h3>
                 <template x-if="Alpine.store(connection)['image']!=''">
                     <img x-bind:src="Alpine.store(connection)['image']">
@@ -217,15 +221,23 @@ a{
     initialize(main_url);
 });

-function initialize(url) {
-    Alpine.store("data", {});
-    interverse_data(url.replace("https://",'').replace('http://','').replace('/',''), function (data) {
-        console.log("Initializing interverse...")
-        /*
-        Alpine.store('data',data['main'])
-        for (c in data['connections']){
-            Alpine.store(c,data['connections'][c])
-        }
-        */
+function initialize(url) {
+    Alpine.store("data", {});
+    interverse_data(url.replace("https://",'').replace('http://','').replace('/',''), function (data) {
+        console.log("Initializing interverse...")
+        Alpine.store('data',data);
+    });
+    fetchback(interverse_proxy+'/complete?url='+url.replace("https://",'').replace('http://','').replace('/',''),function(data){
+        for (c in data['connections']){
+            Alpine.store(c,data['connections'][c])
+        }
+    });
 }
 </script>
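The net effect on the client: the page now paints in two phases. interverse_data() hits the proxy's /initial endpoint and fills the 'data' store with just the origin site's JSON, and only then does fetchback() pull /complete to hydrate the per-connection stores. Roughly the same exchange, sketched as a plain-Python smoke test (assumes a proxy listening on localhost:5000; libresolutions.network is a stand-in origin):

    import requests

    proxy = "http://localhost:5000"
    site = "libresolutions.network"

    # Phase 1: /initial returns only the origin's own interverse JSON (fast).
    origin = requests.get(f"{proxy}/initial", params={"url": site}, timeout=5).json()
    print(origin.get("name"))

    # Phase 2: /complete also resolves every connection (slower, fetched second).
    full = requests.get(f"{proxy}/complete", params={"url": site}, timeout=60).json()
    for url, data in full.get("connections", {}).items():
        print(url, data.get("name"))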
@@ -4,15 +4,27 @@ import simple_cache

+cache = simple_cache.Cache()
+print("Cache initialized.")
 app = Flask('interverse-proxy')

-@app.route("/", methods=['GET'])
-def interverse_proxy():
+@app.route('/')
+def index():
+    return redirect("https://codeberg.org/gabe/Interverse",307)
+
+@app.route("/initial", methods=['GET'])
+def initial():
     url = request.args.get('url')
     if url == None:
-        return "See <a href='https://codeberg.org/gabe/Interverse'>Interverse</a>"
+        return redirect("https://codeberg.org/gabe/Interverse",307)
     data = cache.load_data(url)
     return json.dumps(data)
+
+@app.route("/complete",methods=['GET'])
+def complete():
+    url = request.args.get('url')
+    if url == None:
+        return redirect("https://codeberg.org/gabe/Interverse",307)
+    data = cache.get_interverse_data(url)
+    return json.dumps(data)
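A hedged sketch of exercising the new routes with Flask's built-in test client. The server file is assumed to be interverse-proxy.py (matching start.sh below); the hyphen rules out a literal import statement, so importlib loads it by path. Note that executing the module also runs simple_cache.Cache(), i.e. the initial crawl:

    import importlib.util

    spec = importlib.util.spec_from_file_location("interverse_proxy", "interverse-proxy.py")
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)      # also builds the cache at import time

    client = mod.app.test_client()
    assert client.get("/").status_code == 307        # root now redirects to the repo
    resp = client.get("/initial", query_string={"url": "libresolutions.network"})
    print(resp.get_data(as_text=True))               # cached JSON for the origin only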
@@ -1,16 +1,29 @@
-import requests,time,json
-ideal_delta = 60*5 #5 minutes
+import requests
+import time,json,os
+from joblib import Parallel, delayed

-crawler_header = {'User-agent': 'interverse-crawler','info':'https://libresolutions.network/videos/interverse-demo-1/'}
-schemes = ['http://','https://']
+ideal_delta = 60 * 15  # 15 minutes
+request_timeout = 0.
+cache_origin = 'https://libresolutions.network'
+
+crawler_header = {'User-agent': 'interverse-crawler',
+                  'info': 'https://libresolutions.network/videos/interverse-demo-1/'}
+schemes = ['https://']
 locations = [
     '/.well-known/discover.json',
     '/.well-known/interverse',
-    '/interverse.json',
-    '/discover.json'
+    '/discover.json',
+    '/interverse.json'
 ]
+
+
+def dictify(lst):
+    dat = {}
+    for i in lst:
+        if i != None:
+            dat[i['location']] = i
+    return dat

 class Cache:
     def __init__(self, delta=None):
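The new dictify() helper exists because joblib hands back a plain list; it rebuilds the {url: data} mapping the client iterates over, keyed on each record's own 'location' field, and silently drops failed (None) lookups. For example:

    >>> dictify([{'location': 'a.example'}, None, {'location': 'b.example'}])
    {'a.example': {'location': 'a.example'}, 'b.example': {'location': 'b.example'}}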
@@ -19,10 +32,11 @@ class Cache:
         else:
             self.delta = delta
         self.links = {}
+        self.build_cache()
         # link = key:{data,time}

     def load_data(self, url):
         print(f"Loading interverse data for :{url}")
         data = None
         t = time.time()
         if url in self.links:
@@ -32,24 +46,26 @@ class Cache:
         for s in schemes:
             for l in locations:
                 try:
-                    data = requests.get(s+url+l,headers=crawler_header,timeout=3).json()
+                    data = requests.get(
+                        s+url.replace(
+                            'https://', '').replace('http://', '').replace("/", '')+l, headers=crawler_header, timeout=1).json()
                     if l.find('discover'):
                         # translate discover to interverse
-                        data = json.loads(json.dumps(data).replace("preview_connections","connection_groups"))
+                        data = json.loads(json.dumps(data).replace(
+                            "preview_connections", "connection_groups"))
                     print(f"Interverse connection found at {l}")
+                    t = time.time()
+                    self.links[url] = {
+                        'time': t,
+                        'data': data,
+                    }
+                    return data
                 except:
                     pass
         if data != None:
             t = time.time()
             self.links[url] = {
-                'time':t,
+                'time': t+ideal_delta,
                 'data': data,
             }
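Together with the next hunk, the 'time' changes amount to a freshness/backoff scheme: a successful fetch is now cached and returned as soon as any location answers, stamps written as t+ideal_delta read like expiry deadlines, and (below) a failed lookup is stamped a full hour ahead so unreachable hosts are not re-crawled on every request. The cache-hit branch itself is elided from this diff; a hypothetical sketch of a check consistent with deadline-style stamps, not the committed code:

    # Hypothetical cache-hit branch, reading 'time' as an expiry deadline:
    if url in self.links:
        entry = self.links[url]
        if time.time() < entry['time']:
            return entry['data']      # still fresh: skip the crawl entirely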
@@ -57,28 +73,47 @@ class Cache:
         # If no data is returned, wait longer before attempting again
         self.links[url] = {
             'data': None,
-            'time':t+ideal_delta
+            'time': t+60*60  # 1 hour
         }
         return data

     def get_interverse_data(self, url):
         origin = self.load_data(url)
-        connections = {}
+        connections = []
         try:
             for con in origin['connections']:
-                dat = self.load_data(con.replace('https://','').replace('http://','').replace("/",''))
-                if dat != None:
-                    connections[con] = dat
+                connections.append(con)
         except:
             pass
         try:
             for g in origin['connection_groups']:
                 for con in origin['connection_groups'][g]:
-                    dat = self.load_data(con.replace('https://','').replace('http://','').replace("/",''))
-                    if dat != None:
-                        connections[con] = dat
+                    connections.append(con)
         except:
             pass
+        c = Parallel()(delayed(self.load_data)(i) for i in connections)
         return{
             'main': origin,
-            'connections':connections
+            'connections': dictify(c)
         }

+    def build_cache(self):
+        print("Building cache..\nThis may take some time")
+        origin = self.load_data(cache_origin)
+        connections = []
+        try:
+            for con in origin['connections']:
+                connections.append(con)
+        except:
+            pass
+        try:
+            for g in origin['connection_groups']:
+                for con in origin['connection_groups'][g]:
+                    connections.append(con)
+        except:
+            pass
+        c = Parallel()(delayed(self.get_interverse_data)(i) for i in connections)

 if __name__ == '__main__':
-    c = Cache()
+    cache = Cache()
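get_interverse_data() and the new build_cache() now collect connection URLs first and hand the whole batch to joblib. One caveat: Parallel() with no arguments defaults to n_jobs=None, which joblib interprets as a single worker, so as written the batch still runs sequentially; real concurrency needs an explicit n_jobs. A self-contained illustration with stand-in data:

    from joblib import Parallel, delayed

    def load(url):
        return {'location': url}      # stand-in for Cache.load_data

    urls = ['a.example', 'b.example', 'c.example']
    results = Parallel(n_jobs=4)(delayed(load)(u) for u in urls)
    # dictify(results) then yields {'a.example': {...}, 'b.example': {...}, ...}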
server/start.sh (new executable file, 2 additions)
@@ -0,0 +1,2 @@
+#! /bin/bash
+uwsgi --http 0.0.0.0:5000 -w interverse-proxy:app
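A note on the start script: -w interverse-proxy:app tells uWSGI to import the app object from the interverse-proxy module. The hyphenated name works because uWSGI resolves it as a string at runtime rather than through a Python import statement. Since the Flask file now builds the cache at import time, the first worker can be expected to block on the initial crawl before serving requests.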