Add official response endpoint

This commit is contained in:
Bigsk 2021-06-14 15:35:53 +08:00
parent a492b5fe08
commit 3e02140e97
3 changed files with 225 additions and 103 deletions


@@ -17,6 +17,7 @@ import (
 	"io"
 	"crypto/sha256"
 	"encoding/hex"
+	"path"
 )
 var WorkPath,DataPath string
@@ -51,12 +52,36 @@ func responseWiKi(w http.ResponseWriter,r *http.Request) {
 	b,_:=json.Marshal(returnInfo)
 	w.Write(b)
 }
-func responseFile(w http.ResponseWriter,r *http.Request,value []string) {
-	path:=DataPath
+func responseOfficial(w http.ResponseWriter,r *http.Request,value []string) {
+	paths:=""
+	for _,arg := range value{
+		paths=paths+"/"+arg
+	}
+	if paths=="" || paths=="/"{
+		responseError(w,r,400,"Please offer more params",10005)
+	}else{
+		switcher:=true
+		for _,arg := range Database{
+			if arg["path"]==paths{
+				address:=determineNode(arg["hash"],path.Base(arg["path"]))
+				w.Header().Set("Location",address)
+				w.WriteHeader(302)
+				switcher=false
+				break
+			}
+		}
+		if switcher==true{
+			responseError(w,r,404,"Cannot find file",10003)
+		}
+	}
+}
+func responseFile(w http.ResponseWriter,r *http.Request,value []string) {
+	fileName:=value[0]
+	path:=DataPath
 	for _,arg := range value[1:]{
 		path=path+"/"+arg
 	}
-	if path==""{
+	if path==DataPath{
 		responseError(w,r,403,"Directory listing is not allowed",10002)
 	}else{
 		b,err:=ioutil.ReadFile(path)
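For context, a minimal sketch of how a launcher-side client would use the new handler, assuming pageService exposes it as /official/<path> (consistent with the case "official" branch further down) and the master runs on the 127.0.0.1:8000 address hard-coded in determineNode. requests follows the 302 Location header on its own:

import requests

# Hypothetical client call; any path recorded in the database works.
master = "http://127.0.0.1:8000"
resp = requests.get(master + "/official/mc/game/version_manifest.json")
# requests transparently follows the 302 to the node URL from the Location
# header, so resp.content is the cached file itself.
print(resp.status_code, len(resp.content))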
@@ -70,6 +95,8 @@ func responseFile(w http.ResponseWriter,r *http.Request,value []string) {
 				logIn("[Page Thread]","Read the file ",path," failed in data dir! Error info: ",err.Error())
 			}
 		}else{
+			w.Header().Set("Content-Disposition","attachment;filename="+fileName)
+			w.Header().Set("Content-Type","application/octet-stream")
 			w.Write(b)
 		}
 	}
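The two added headers tell clients to treat the response as a download and save it under its original name instead of rendering it inline. A sketch of a client honoring that header, with a made-up hash:

import requests

# /file/<name>/<hash> is the layout determineNode produces; the hash here is fake.
url = "http://127.0.0.1:8000/file/version_manifest.json/0123456789abcdef0123456789abcdef"
resp = requests.get(url)
disposition = resp.headers.get("Content-Disposition", "")
# responseFile sets exactly "attachment;filename=<name>".
name = disposition.split("filename=")[-1] or "download.bin"
with open(name, "wb") as fb:
    fb.write(resp.content)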
@@ -92,9 +119,9 @@ func pageService(w http.ResponseWriter,r *http.Request) {
 		responseICON(w,r)
 	case "file":
 		responseFile(w,r,value)
-	/*
 	case "official":
 		responseOfficial(w,r,value)
+	/*
 	case "bmclapi":
 		responseBMCLAPI(w,r,value)
 	case "default":
@@ -109,6 +136,7 @@ func pageService(w http.ResponseWriter,r *http.Request) {
 }
 func databaseService() {
 	for {
+		defer recover()
 		file,err:=os.OpenFile(WorkPath+"/database.db",os.O_RDONLY,0600)
 		defer file.Close()
 		if err != nil {
@@ -128,8 +156,8 @@ func databaseService() {
 func nodeService() {
 }
-func determineNode(file string) string {
-	return "http://127.0.0.1/file"+file
+func determineNode(hash string,fileName string) string {
+	return "http://127.0.0.1:8000/file/"+fileName+"/"+hash
 }
 func syncService() {
 	for {
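determineNode now takes both the hash and a display name: nodes store blobs under their hash, while the URL carries the human-readable filename so responseFile can hand it back in Content-Disposition. A sketch of how responseOfficial and determineNode together turn a database record into a redirect target (node address as assumed above):

import posixpath

record = {"hash": "0123456789abcdef0123456789abcdef",  # illustrative values
          "path": "/mc/game/version_manifest.json"}
file_name = posixpath.basename(record["path"])  # path.Base in the Go code
location = "http://127.0.0.1:8000/file/" + file_name + "/" + record["hash"]
print(location)  # the Location value responseOfficial sends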
@@ -213,4 +241,5 @@ func main() {
 10002 Directory listing is not allowed
 10003 Cannot find file
 10004 The file exists but cannot be read
+10005 Please offer more params
 */

master/sync.py Normal file (+190 lines)

@@ -0,0 +1,190 @@
'''
Copyright Ghink Network Studio
Website: https://www.ghink.net
'''
import requests,os,time,json,threading,hashlib,re
from urllib import parse
from bs4 import BeautifulSoup as bs
db=[]
workPath="gmclapi/"
dataPath=workPath+"data/"
userAgent={'User-Agent':'GMCLAPI/0.0.1'}
proxies = {
    "http": "http://127.0.0.1:4780"
}
entrance=["http://launchermeta.mojang.com/mc/game/version_manifest.json","http://dl.liteloader.com/versions/versions.json"]
threadLock=[200,0]  # [thread limit, threads currently running]
def log(info):
    info="{}{}".format(time.strftime("%Y/%m/%d %H:%M:%S [Sync Thread]", time.localtime()),info)
    print(info)
    with open(workPath+"logs.log","a+") as fb:
        fb.write(info+"\n")
def database():
    # Keep the in-memory db authoritative and mirror it back to disk.
    threadLock[1]+=1
    global db
    with open(workPath+"database.db","r") as fb:
        db=json.loads(fb.read())
    while True:
        with open(workPath+"database.db","r") as fb:
            if json.loads(fb.read())==db:
                continue
        with open(workPath+"database.db","w+") as fb:
            fb.write(json.dumps(db))
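For reference, database.db is a flat JSON array; every db.append in this file writes the same three keys, and the Go master loads the array back as Database. Values below are illustrative:

import json

record = {
    "hash": "0123456789abcdef0123456789abcdef",  # md5 of the cached payload
    "source": "http://launchermeta.mojang.com/mc/game/version_manifest.json",
    "path": "/mc/game/version_manifest.json",    # lookup key the master matches on
}
print(json.dumps([record]))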
def syncMain():
    # Mirror the entrance manifests, replacing any stale record for the same path.
    global db
    for obj in entrance:
        while True:
            try:
                origin=requests.get(obj,headers=userAgent,proxies=proxies).content
                md5=hashlib.md5()
                md5.update(origin)
                hash=md5.hexdigest()
                switch=True
                for h in db:
                    if h["hash"]==hash:
                        switch=False
                        break
                if switch:
                    log("Synchronizing the file {} from {}".format(parse.urlparse(obj).path,obj))
                    i=0
                    for h in db:
                        if h["path"]==parse.urlparse(obj).path:
                            del db[i]  # drop the outdated record for this path
                        i+=1
                    with open(dataPath+hash,"wb") as fb:
                        fb.write(origin)
                    db.append({
                        "hash":hash,
                        "source":obj,
                        "path":parse.urlparse(obj).path
                    })
                break
            except:
                pass
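The bare while True / except: pass loop retries instantly and forever when a download keeps failing. A bounded alternative, exponential backoff rather than what the code above does, as a sketch with a hypothetical helper name:

import time
import requests

def fetch_with_backoff(url, headers=None, proxies=None, tries=5):
    # Give transient network errors room to clear instead of spinning hot.
    delay = 1
    for _ in range(tries):
        try:
            return requests.get(url, headers=headers, proxies=proxies, timeout=30).content
        except requests.RequestException:
            time.sleep(delay)
            delay = min(delay * 2, 60)
    raise RuntimeError("giving up on " + url)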
def syncJson():
    # Scan every cached .json manifest for embedded URLs and mirror each one once.
    synced=[]
    def syncThread(rec):
        global db
        threadLock[1]+=1
        with open(dataPath+rec["hash"],"r") as fb:
            fall=re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',fb.read())
        for obj in fall:
            if parse.urlparse(obj).path=="/":
                continue
            while True:
                try:
                    switch=True
                    for r in db:
                        if r["path"]==parse.urlparse(obj).path:
                            switch=False
                            break
                    if switch:
                        log("Synchronizing the file {} from {}".format(parse.urlparse(obj).path,obj))
                        origin=requests.get(obj,headers=userAgent,proxies=proxies).content
                        md5=hashlib.md5()
                        md5.update(origin)
                        hash=md5.hexdigest()
                        with open(dataPath+hash,"wb") as fb:
                            fb.write(origin)
                        db.append({
                            "hash":hash,
                            "source":obj,
                            "path":parse.urlparse(obj).path
                        })
                    break
                except:
                    pass
        threadLock[1]-=1
    while True:
        i=0
        for rec in db:
            if ".json" in rec["path"] and rec["path"] not in synced:
                i+=1
                if threadLock[0]>threadLock[1]:
                    synced.append(rec["path"])
                    threading.Thread(target=syncThread,args=(rec,)).start()
        if i==0:
            break
    log("Synchronizing for json list finished")
def syncForge(entrance="https://files.minecraftforge.net/net/minecraftforge/forge/"):
    # Crawl the Forge file listing recursively and mirror jars, zips and txts.
    global db
    crawed=[]
    def syncThread(entrance):
        global db
        threadLock[1]+=1
        log("Crawling the page {}".format(entrance))
        page=requests.get(entrance,headers=userAgent,proxies=proxies).text
        soup=bs(page,features="html5lib")
        pageurls=soup.find_all("a",href=True)
        for obj in pageurls:
            link=obj.get("href")
            if ".html" in link and "http" not in link and link not in crawed:
                crawed.append(link)
                threading.Thread(target=syncThread,args=(parse.urljoin(entrance,link),)).start()
        fall=re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',page)
        for obj in fall:
            switch=True
            for h in db:
                # obj[48:] strips the fixed-length ad-redirect prefix wrapping Forge links
                if h["path"]==parse.urlparse(obj).path or h["path"]==parse.urlparse(obj[48:]).path:
                    switch=False
                    break
            if switch:
                if ".jar" in obj or ".zip" in obj:
                    log("Synchronizing the file {} from {}".format(parse.urlparse(obj[48:]).path,obj[48:]))
                    while True:
                        try:
                            origin=requests.get(obj[48:],headers=userAgent,proxies=proxies).content
                            md5=hashlib.md5()
                            md5.update(origin)
                            hash=md5.hexdigest()
                            with open(dataPath+hash,"wb") as fb:
                                fb.write(origin)
                            db.append({
                                "hash":hash,
                                "source":obj[48:],
                                "path":parse.urlparse(obj[48:]).path
                            })
                            break
                        except:
                            log("Synchronizing the file {} from {} failed! Retrying...".format(parse.urlparse(obj[48:]).path,obj[48:]))
                            time.sleep(10)
                elif ".txt" in obj:
                    log("Synchronizing the file {} from {}".format(parse.urlparse(obj).path,obj))
                    while True:
                        try:
                            origin=requests.get(obj,headers=userAgent,proxies=proxies).content
                            md5=hashlib.md5()
                            md5.update(origin)
                            hash=md5.hexdigest()
                            with open(dataPath+hash,"wb") as fb:
                                fb.write(origin)
                            db.append({
                                "hash":hash,
                                "source":obj,
                                "path":parse.urlparse(obj).path
                            })
                            break
                        except:
                            log("Synchronizing the file {} from {} failed! Retrying...".format(parse.urlparse(obj).path,obj))
                            time.sleep(10)
        threadLock[1]-=1
    syncThread(entrance)
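The repeated obj[48:] slices assume each Forge download link is wrapped in an adfoc.us redirect whose prefix is exactly 48 characters long. An illustration with a made-up id and artifact path:

from urllib import parse

wrapped = "https://adfoc.us/serve/sitelinks/?id=271228&url=https://maven.minecraftforge.net/forge/forge.jar"
direct = wrapped[48:]  # drop the fixed-length ad-redirect prefix
print(direct)                       # https://maven.minecraftforge.net/forge/forge.jar
print(parse.urlparse(direct).path)  # /forge/forge.jar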
def monitor():
    while True:
        log(str(threadLock))
        time.sleep(1)
def main():
    threading.Thread(target=database).start()
    #threading.Thread(target=monitor).start()
    time.sleep(1)
    while True:
        syncMain()
        threading.Thread(target=syncJson).start()
        threading.Thread(target=syncForge).start()
        time.sleep(60*60*24)  # resync once per day
if __name__=="__main__":
    main()


@@ -1,97 +0,0 @@
'''
Copyright Ghink Network Studio
Website: https://www.ghink.net
'''
import requests,os,time,json,threading,hashlib,re
from urllib.parse import urlparse
db=[]
workPath="gmclapi/"
dataPath=workPath+"data/"
userAgent={'User-Agent':'GMCLAPI/0.0.1'}
proxies = {
    "http": "http://127.0.0.1:4780"
}
entrance=["http://launchermeta.mojang.com/mc/game/version_manifest.json"]
def log(info):
    info="{}{}".format(time.strftime("%Y/%m/%d %H:%M:%S [Sync Thread]", time.localtime()),info)
    print(info)
    with open(workPath+"logs.log","a+") as fb:
        fb.write(info)
        fb.write("\r\n")
def database():
    global db
    with open(workPath+"database.db","r") as fb:
        db=json.loads(fb.read())
    while True:
        with open(workPath+"database.db","r") as fb:
            if json.loads(fb.read())==db:
                continue
        with open(workPath+"database.db","w+") as fb:
            fb.write(json.dumps(db))
def syncMain():
    global entrance
    global userAgent
    global proxies
    for obj in entrance:
        origin=requests.get(obj,headers=userAgent,proxies=proxies).content
        md5=hashlib.md5()
        md5.update(origin)
        hash=md5.hexdigest()
        switch=True
        for h in db:
            if h["hash"]==hash:
                switch=False
                break
        if switch:
            i=0
            for h in db:
                if h["path"]==urlparse(obj).path:
                    del switch[i]
                i+=1
            with open(dataPath+hash,"wb") as fb:
                fb.write(origin)
            db.append({
                "hash":hash,
                "source":obj,
                "path":urlparse(obj).path
            })
def syncMinecraft():
    global userAgent
    global proxies
    for h in db:
        if h["path"]=="/mc/game/version_manifest.json":
            hash=h["hash"]
            break
    with open(dataPath+hash,"r") as fb:
        fall=fb.read()
    result=re.findall('(https?|ftp|file)|([-A-Za-z0-9+&@#/%?=~_|!:,.;]+[-A-Za-z0-9+&@#/%=~_|])',fall)
    print(result)
    res=[]
    for i in result:
        res.append("{}{}".format(i[0]+i[1]))
    for obj in res:
        switch=True
        for h in db:
            if h["source"]==obj:
                switch=False
        if switch:
            origin=requests.get(obj,headers=userAgent,proxies=proxies).content
            md5=hashlib.md5()
            md5.update(origin)
            hash=md5.hexdigest()
            with open(dataPath+hash,"wb") as fb:
                fb.write(origin)
            db.append({
                "hash":hash,
                "source":obj,
                "path":urlparse(obj).path
            })
def main():
    threading.Thread(target=database).start()
    syncMain()
#main()
threading.Thread(target=database).start()
time.sleep(1)
syncMinecraft()