performance upgrades

parent b294032487 · commit 3ac454eda2
5 changed files with 116 additions and 51 deletions
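Summary (reconstructed from the hunks below): the proxy's single route is split into a fast `/initial` endpoint that serves cached origin data and a `/complete` endpoint that resolves connection data; the client paints the main payload immediately and backfills connections afterwards; the cache fans connection fetches out through joblib's Parallel, pre-warms itself from `cache_origin` at startup, and remembers failed lookups for an hour before retrying.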
@@ -1,5 +1,9 @@
+function fetchback(url,cback){
+    fetch(url).then((request)=>request.json()).then((data)=>cback(data));
+}
+
 function interverse_data(url,cback){
-    url = interverse_proxy+"?url="+url
+    url = interverse_proxy+"/initial?url="+url
     fetch(url).then((response)=> {
         if (!response.ok){
             return {}
@@ -15,6 +15,10 @@
 #interverse-details>*{
     margin:1rem;
 }
+#interverse-details img{
+    max-width:20vw;
+    max-height:50vh;
+}
 #interverse-resource-groups{
     display: flex;
     flex-direction: row;
@@ -180,7 +184,7 @@ a{
 <template x-for="connection in Alpine.store('data')['connections']">
     <div class="interverse-connection">
         <template x-if="Alpine.store(connection)['name']">
-            <div x-on:click="initialize(connection)">
+            <div x-on:click="initialize(connection)" class='interverse-connection-preview'>
                 <h3 x-text="Alpine.store(connection)['name']"></h3>
                 <template x-if="Alpine.store(connection)['image']!=''">
                     <img x-bind:src="Alpine.store(connection)['image']">
@@ -217,15 +221,23 @@ a{
     initialize(main_url);
 });

+/*
+Alpine.store('data',data['main'])
+for (c in data['connections']){
+    Alpine.store(c,data['connections'][c])
+}
+*/
+
 function initialize(url) {
     Alpine.store("data", {});
     interverse_data(url.replace("https://",'').replace('http://','').replace('/',''), function (data) {
         console.log("Initializing interverse...")
-        Alpine.store('data',data['main'])
+        Alpine.store('data',data);
+    });
+    fetchback(interverse_proxy+'/complete?url='+url.replace("https://",'').replace('http://','').replace('/',''),function(data){
         for (c in data['connections']){
             Alpine.store(c,data['connections'][c])
         }
-
     });
 }
 </script>
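The client now loads in two phases: `interverse_data()` hits the proxy's `/initial` route and renders the main payload right away, then `fetchback()` pulls the heavier `/complete` payload and backfills the per-connection Alpine stores. A minimal sketch of the same two requests replayed from Python; host and port are assumptions (start.sh below binds uwsgi to 0.0.0.0:5000):

    # Sketch: the client's two-phase fetch, replayed with requests.
    # http://localhost:5000 is an assumed local deployment of the proxy.
    import requests

    proxy = "http://localhost:5000"
    site = "libresolutions.network"  # scheme and slashes stripped, as in the client

    # Phase 1: cached origin data only -- small and fast, used for first paint.
    main = requests.get(f"{proxy}/initial", params={"url": site}).json()

    # Phase 2: origin plus resolved connections -- slower, backfilled afterwards.
    complete = requests.get(f"{proxy}/complete", params={"url": site}).json()

    print(main)                           # the site's own interverse data
    print(list(complete["connections"]))  # resolved connections, keyed by location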
@@ -4,15 +4,27 @@ import simple_cache


-app = Flask('interverse-proxy')
 cache = simple_cache.Cache()
+print("Cache intitialized.")
+app = Flask('interverse-proxy')


-@app.route("/", methods=['GET'])
-def interverse_proxy():
+@app.route('/')
+def index():
+    return redirect("https://codeberg.org/gabe/Interverse",307)
+
+
+@app.route("/initial", methods=['GET'])
+def initial():
+    url = request.args.get('url')
+    if url == None:
+        return redirect("https://codeberg.org/gabe/Interverse",307)
+    data = cache.load_data(url)
+    return json.dumps(data)
+
+
+@app.route("/complete",methods=['GET'])
+def complete():
     url = request.args.get('url')
     if url == None:
         return redirect("https://codeberg.org/gabe/Interverse",307)
-    return "See <a href='https://codeberg.org/gabe/Interverse'>Interverse</a>"
     data = cache.get_interverse_data(url)
     return json.dumps(data)
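The routes above use `redirect`, `request`, and `json`, all of which sit outside the hunk; presumably the top of the file (only `import simple_cache` is visible in the hunk context) carries something like the following. This is inferred from usage, not a line from the commit:

    # Presumed header for the proxy module (inferred, not part of the diff).
    import json

    from flask import Flask, redirect, request

    import simple_cache

The 307 status makes each redirect explicitly temporary, so clients keep asking the proxy rather than caching the repository URL as the permanent answer.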
@@ -1,28 +1,42 @@
-import requests,time,json
-ideal_delta = 60*5 #5 minutes
-crawler_header = {'User-agent': 'interverse-crawler','info':'https://libresolutions.network/videos/interverse-demo-1/'}
-schemes = ['http://','https://']
+import requests
+import time,json,os
+from joblib import Parallel, delayed
+
+ideal_delta = 60 * 15  # 15 minutes
+request_timeout = 0.
+cache_origin = 'https://libresolutions.network'
+
+
+crawler_header = {'User-agent': 'interverse-crawler',
+                  'info': 'https://libresolutions.network/videos/interverse-demo-1/'}
+schemes = ['https://']
 locations = [
     '/.well-known/discover.json',
     '/.well-known/interverse',
-    '/interverse.json',
-    '/discover.json'
+    '/interverse.json'
 ]
+
+
+def dictify(lst):
+    dat = {}
+    for i in lst:
+        if i != None:
+            dat[i['location']]=i
+    return dat
+
 class Cache:
-    def __init__(self,delta=None):
-        if delta==None:
+    def __init__(self, delta=None):
+        if delta == None:
             self.delta = ideal_delta
         else:
             self.delta = delta
-        self.links={}
+        self.links = {}
+        self.build_cache()
         # link = key:{data,time}
-    def load_data(self,url):
+
+    def load_data(self, url):
+        print(f"Loading interverse data for :{url}")
         data = None
         t = time.time()
         if url in self.links:
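New in this file: `joblib.Parallel`/`delayed` for fanning out the per-connection fetches, and `dictify()` to key the non-None results by their `location` field. A small self-contained sketch of that pattern, with a hypothetical stand-in for `Cache.load_data`:

    # Sketch of the Parallel/delayed fan-out the cache now uses.
    # fetch_one is a stand-in, not the real load_data.
    from joblib import Parallel, delayed

    def fetch_one(url):
        return {"location": url, "name": url.split(".")[0]}

    urls = ["a.example", "b.example", "c.example"]

    # Build one delayed call per URL, then let Parallel run the batch.
    results = Parallel()(delayed(fetch_one)(u) for u in urls)

    # dictify(): keep non-None results, keyed by their 'location' field.
    by_location = {r["location"]: r for r in results if r is not None}
    print(by_location)

One caveat worth noting: `Parallel()` with no `n_jobs` argument runs its jobs sequentially (the default is a single worker), so as committed the fan-out does not yet overlap network waits; passing e.g. `n_jobs=-1` would be the next step if real concurrency is intended.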
@@ -32,53 +46,74 @@ class Cache:
         for s in schemes:
             for l in locations:
                 try:
-                    data = requests.get(s+url+l,headers=crawler_header,timeout=3).json()
+                    data = requests.get(
+                        s+url.replace(
+                            'https://', '').replace('http://', '').replace("/", '')+l, headers=crawler_header, timeout=1).json()
                     if l.find('discover'):
-                        #translate discover to interverse
-                        data = json.loads(json.dumps(data).replace("preview_connections","connection_groups"))
+                        # translate discover to interverse
+                        data = json.loads(json.dumps(data).replace(
+                            "preview_connections", "connection_groups"))
                     print(f"Interverse connection found at {l}")
                     t = time.time()
                     self.links[url] = {
-                        'time':t,
-                        'data':data,
+                        'time': t,
+                        'data': data,
                     }

                     return data
                 except:
                     pass
         if data != None:
             t = time.time()
             self.links[url] = {
-                'time':t,
-                'data':data,
+                'time': t+ideal_delta,
+                'data': data,
             }

         if data == None:
-            #If no data is returned, wait longer before attempting again
+            # If no data is returned, wait longer before attempting again
             self.links[url] = {
-                'data':None,
-                'time':t+ideal_delta
+                'data': None,
+                'time': t+60*60  # 1 hour
             }
         return data
-    def get_interverse_data(self,url):
+
+    def get_interverse_data(self, url):
         origin = self.load_data(url)
-        connections = {}
-        for con in origin['connections']:
-            dat = self.load_data(con.replace('https://','').replace('http://','').replace("/",''))
-            if dat != None:
-                connections[con] = dat
-        for g in origin['connection_groups']:
-            for con in origin['connection_groups'][g]:
-                dat = self.load_data(con.replace('https://','').replace('http://','').replace("/",''))
-                if dat != None:
-                    connections[con] = dat
-        return {
-            'main':origin,
-            'connections':connections
-        }
+        connections = []
+        try:
+            for con in origin['connections']:
+                connections.append(con)
+        except:
+            pass
+        try:
+            for g in origin['connection_groups']:
+                for con in origin['connection_groups'][g]:
+                    connections.append(con)
+        except:
+            pass
+        c = Parallel()(delayed(self.load_data)(i) for i in connections)
+        return{
+            'main': origin,
+            'connections': dictify(c)
+        }
+
+    def build_cache(self):
+        print("Building cache..\nThis may take some time")
+        origin = self.load_data(cache_origin)
+        connections = []
+        try:
+            for con in origin['connections']:
+                connections.append(con)
+        except:
+            pass
+        try:
+            for g in origin['connection_groups']:
+                for con in origin['connection_groups'][g]:
+                    connections.append(con)
+        except:
+            pass
+        c = Parallel()(delayed(self.get_interverse_data)(i) for i in connections)


 if __name__ == '__main__':
-    c = Cache()
+    cache = Cache()
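Cache lifetimes change in two ways: `ideal_delta` rises from 5 to 15 minutes, and a failed lookup is now remembered for a full hour (`t+60*60`) before another attempt, so dead sites stop being re-crawled every cycle. One small flag while here: `l.find('discover')` returns -1 (truthy) when 'discover' is absent, so the discover-to-interverse key rewrite fires for every location; `'discover' in l` expresses the intended test. A toy illustration of the intended bookkeeping follows; the actual freshness comparison lives between the hunks shown, so the check below is an assumption:

    # Sketch of the two-tier lifetime rule written by this commit.
    # The cached() freshness check is assumed, not shown in the diff.
    import time

    IDEAL_DELTA = 60 * 15    # successful lookups: refresh after 15 minutes
    FAILURE_DELTA = 60 * 60  # failed lookups: leave alone for an hour

    links = {}

    def remember(url, data):
        ttl = FAILURE_DELTA if data is None else IDEAL_DELTA
        links[url] = {'data': data, 'time': time.time() + ttl}

    def cached(url):
        entry = links.get(url)
        if entry and time.time() < entry['time']:
            return entry['data']
        return None  # stale or unknown -> caller refetches

    remember("live.example", {"name": "Live"})
    remember("dead.example", None)
    print(cached("live.example"))  # {'name': 'Live'} for the next 15 minutes
    print(cached("dead.example"))  # None; no refetch for an hour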
server/start.sh (new executable file, 2 additions)

@@ -0,0 +1,2 @@
+#! /bin/bash
+uwsgi --http 0.0.0.0:5000 -w interverse-proxy:app
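The new start script serves the proxy through uwsgi's built-in HTTP listener on port 5000; `-w interverse-proxy:app` points uwsgi at the `app` callable in the proxy module. With the file marked executable, running `./start.sh` brings the service up, after which the `/initial` and `/complete` routes above are reachable.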