import os, requests, json, time, sys, argparse, psutil

# Accumulated log text; flushed to a timestamped file under logs/ at the end of the run.
Log = ""

# Request headers sent with every HTTP call.
UA = {'User-Agent': 'GeMCAPI/6.0.0 (GeMC API Sync 6.0.0;Release)'}

# Command-line arguments: source URL, destination path (stored under file/), and the
# two classification fields recorded in the generated SQL statement.
parser = argparse.ArgumentParser()
parser.add_argument("--url")
parser.add_argument("--file")
parser.add_argument("--type")
parser.add_argument("--type_sub")
args = parser.parse_args()

# Local HTTP proxy used for every request.
proxies = {'http': 'http://127.0.0.1:4780'}
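
# A hypothetical invocation for illustration only; the script name, URL, path, and
# type values below are made up and are not part of the original script:
#
#   python sync.py --url https://example.com/api/versions.json \
#       --file api/versions.json --type api --type_sub versions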

def log(log_type, logvar):
    # Append one timestamped, level-tagged line to the global log buffer.
    global Log
    if log_type == "info":
        log_type = "INFO"
    elif log_type == "warn":
        log_type = "WARN"
    elif log_type == "error":
        log_type = "ERROR"
    Log = Log + '\n[' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + '][' + log_type + ']' + logvar

def is_json(url):
    # Fetch the URL and report whether the response body parses as JSON.
    online_return = requests.get(url, headers=UA, proxies=proxies).text
    try:
        json.loads(online_return)
        return True
    except ValueError:
        return False
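
# Illustrative call of the helper above; the URL is a placeholder, not taken from
# the original script:
#
#   if is_json("https://example.com/api/versions.json"):
#       print("The endpoint returned valid JSON")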

def download(url, dirpath):
    global last_dirpath
    global return_format
    global return_headers
    log("info", 'Downloading the file ' + dirpath + '.')
    # Fetch the resource once; the reported content type decides how it is stored.
    response = requests.get(url, headers=UA, proxies=proxies)
    return_headers = response.headers.get('content-type', '')
    last_dirpath = ''
    # Make sure the directory part of the target path exists under file/.
    make_dir_exist('file/' + args.file.rsplit('/', 1)[0])
    try:
        if return_headers.startswith("application/json"):
            return_format = "json"
            extension = '.json'
        elif return_headers == "application/java-archive":
            return_format = "jar"
            extension = '.jar'
        else:
            return_format = ""
            extension = None
        if extension is None or extension in dirpath:
            # The target path already names a file (or the type is unknown): write to it directly.
            with open(dirpath, "wb") as code:
                code.write(response.content)
        else:
            # Treat the target path as a directory: create it and store an index file inside.
            make_dir_exist(dirpath)
            last_dirpath = '/index' + extension
            with open(dirpath + last_dirpath, "wb") as code:
                code.write(response.content)
    except Exception:
        log("error", 'There was an error while downloading the file ' + dirpath + '.')
    else:
        log("info", 'The file ' + dirpath + ' has been downloaded successfully.')

def make_dir_exist(dirpath):
    # Create dirpath (and any missing parents) unless it already exists.
    if os.path.exists(dirpath):
        log("info", 'The dir ' + dirpath + ' already exists. Nothing to do.')
    else:
        log("info", 'The dir ' + dirpath + ' does not exist. Creating it.')
        os.makedirs(dirpath)

# Download the requested file, then print the SQL statement that registers it
# in the `common` table.
download(args.url, 'file/' + args.file)
print("INSERT INTO `common` (`path`, `source`, `last_update`, `type`, `type_sub`, `format`, `header`, `file`) VALUES ('" + args.file + "', '" + args.url + "', '" + str(round(time.time())) + "', '" + args.type + "', '" + args.type_sub + "', '" + return_format + "', '" + return_headers + "', 'file/" + args.file + last_dirpath + "');")

# Persist the accumulated log under logs/ with a timestamped file name.
make_dir_exist('logs')
with open("logs/" + time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + '.txt', mode='w') as logwrite:
    logwrite.write(Log)