Mirror of https://github.com/JamesonHuang/OpenWrt_Luci_Lua.git (synced 2024-11-23 22:00:11 +00:00)
write cacheUtils code
parent e43a0d72ec
commit 0f81e2792c
104  1_7.http_proxy_server/python/testCode/cache/img.htm (vendored)
File diff suppressed because one or more lines are too long
[Binary image files in the diff: 6.8 KiB and 14 KiB, dimensions and size unchanged]
File diff suppressed because it is too large
@@ -2,8 +2,9 @@
 # coding=utf-8
 import urllib
 import urllib2
+import json
 class CacheUtils:
+    @staticmethod
     def cbk(a, b, c):
         '''''Callback function
         @a: data blocks downloaded so far
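The cbk() method that this hunk turns into a @staticmethod is the usual urlretrieve progress hook: urllib calls it after each block with the block count, the block size, and the total file size, and the method prints a completion percentage (the print statement appears in the next hunk). A minimal sketch of the same pattern, written against Python 3's urllib.request since the committed file targets Python 2; the URL and output file name below are placeholders, not taken from the commit:

import urllib.request

def report_progress(block_num, block_size, total_size):
    # urlretrieve invokes this after every block: blocks fetched so far,
    # size of one block in bytes, and the total size reported by the server.
    if total_size > 0:
        percent = min(100.0, 100.0 * block_num * block_size / total_size)
        print('%.2f%%' % percent)

if __name__ == '__main__':
    urllib.request.urlretrieve('http://example.com/', 'example.html',
                               reporthook=report_progress)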
@@ -16,15 +17,53 @@ class CacheUtils:
         print '%.2f%%' % per

     def download(self, url, local):
-        urllib.urlretrieve(url, local, cbk)
+        urllib.urlretrieve(url, local, self.cbk)

-    def cache(self, url):
+    def cache(self, url, range):
         fileName = url.split('/')[-1]
         req = urllib2.Request(url)
-        #req.add_header('Range', 'bytes=0-7000')
+        req.add_header('Range', 'bytes=' + range)
         response = urllib2.urlopen(req)
         buffer = response.read()
-        with open("./cache/img.png", "a+") as fp:
+        with open("./cache/" + fileName + range, "a+") as fp:
             fp.write(buffer)

+    def saveReq(self, url):
+
+        # Reading data back
+        with open('data.json', 'r') as fp:
+            data = json.load(fp)
+        data[url] = 4000
+        # Writing JSON data
+        with open('data.json', 'w') as fp:
+            json.dump(data, fp)
+
+
+    def checkReq(self):
+        # Reading data back
+        with open('data.json', 'r') as fp:
+            data = json.load(fp)
+        #print(data)
+        #print(data.keys())
+        print(data["www.baidu.com"])
+        if data.get("key"):
+            print(data["key"])
+        else:
+            print("error")
+
+"""
+if __name__ == '__main__':
+    cacheUtils = CacheUtils()
+
+    #url = "http://www.sina.com.cn"
+    #fileName = url.split('/')[-1]
+    #cacheUtils.download(url, "./cache/" + fileName)
+
+    #cacheUtils.cache("http://www.baidu.com")
+    #cacheUtils.cache("https://ss0.bdstatic.com/5aV1bjqh_Q23odCf/static/superplus/img/logo_white_ee663702.png", "0-7000")
+    #cacheUtils.cache("https://ss0.bdstatic.com/5aV1bjqh_Q23odCf/static/superplus/img/logo_white_ee663702.png", "7001-14175")
+
+    cacheUtils.saveReq("http://www.sina.com.cn")
+
+    #cacheUtils.loadReq()
+"""
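Taken together, the changes to cache() make it fetch a single byte range of a URL (sent as an HTTP Range header) and append the returned bytes to a cache file named after the resource plus that range; the commented-out calls in the __main__ block fetch the two halves of the same PNG. A rough Python 3 sketch of that behaviour, for illustration only: the parameter is renamed so it no longer shadows the built-in range(), the file is opened in binary append mode, and the cache directory and helper name are assumptions rather than part of the commit:

import os
import urllib.request

def cache_range(url, byte_range, cache_dir='./cache'):
    file_name = url.split('/')[-1]
    req = urllib.request.Request(url)
    req.add_header('Range', 'bytes=' + byte_range)      # e.g. "0-7000"
    with urllib.request.urlopen(req) as response:       # expect HTTP 206 Partial Content
        chunk = response.read()
    os.makedirs(cache_dir, exist_ok=True)
    with open(os.path.join(cache_dir, file_name + byte_range), 'ab') as fp:
        fp.write(chunk)
    return len(chunk)

# Mirrors one of the commented-out calls above:
# cache_range("https://ss0.bdstatic.com/5aV1bjqh_Q23odCf/static/superplus/img/logo_white_ee663702.png", "0-7000")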
1  1_7.http_proxy_server/python/testCode/data.json (new file)
@@ -0,0 +1 @@
+{"http://www.sina.com.cn": 4000}
1  1_7.http_proxy_server/python/testCode/dd.json (new file)
@@ -0,0 +1 @@
+{"www.baidu.com": 1000, "http://www.sina.com.cn": 4000}
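data.json and dd.json are the request log that saveReq() and checkReq() read and write: a flat JSON object mapping a URL to a number, which the commit hard-codes to 4000 (presumably the number of bytes cached so far). A hedged sketch of that bookkeeping in Python 3, generalised to store a real byte count and to tolerate a missing log file; the helper names and default path are illustrative, not from the repository:

import json
import os

LOG_PATH = 'data.json'

def save_req(url, byte_count, path=LOG_PATH):
    data = {}
    if os.path.exists(path):              # unlike saveReq(), start fresh if the log is missing
        with open(path, 'r') as fp:
            data = json.load(fp)
    data[url] = byte_count
    with open(path, 'w') as fp:
        json.dump(data, fp)

def check_req(url, path=LOG_PATH):
    if not os.path.exists(path):
        return None
    with open(path, 'r') as fp:
        data = json.load(fp)
    return data.get(url)                  # None if the URL has never been cached

# Example matching data.json above:
# save_req("http://www.sina.com.cn", 4000)
# check_req("http://www.sina.com.cn")   -> 4000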