Skip to content

Commit

Permalink
Merge pull request #38 from TTB-Network/dev/dashboard
Browse files Browse the repository at this point in the history
🐛 修复漏洞
  • Loading branch information
SilianZ authored Apr 16, 2024
2 parents 0089791 + e946e03 commit 561dd9e
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 23 deletions.
31 changes: 20 additions & 11 deletions core/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -740,17 +740,18 @@ async def get(self, file: str, offset: int = 0) -> File:
file,
size=int(resp.headers.get("Content-Length", 0)),
)
f.headers = {}
for field in (
"ETag",
"Last-Modified",
"Content-Length",
"Content-Range",
):
if field not in resp.headers:
continue
f.headers[field] = resp.headers.get(field)
if resp.status == 200:
f.headers = {}
for field in (
"ETag",
"Last-Modified",
"Content-Length",
"Content-Range",
"Accept-Ranges"
):
if field not in resp.headers:
continue
f.headers[field] = resp.headers.get(field)
f.set_data(await resp.read())
f.expiry = time.time() + CACHE_TIME
elif resp.status // 100 == 3:
Expand Down Expand Up @@ -909,7 +910,8 @@ async def get(self, hash: str, offset: int, ip: str, ua: str = "") -> Optional[F
if not exists:
return None
file = await storage.get(hash, offset)
self._storage_stats[storage].hit(file, offset, ip, ua)
if file is not None:
self._storage_stats[storage].hit(file, offset, ip, ua)
return file

def get_storage_stat(self, storage):
Expand Down Expand Up @@ -1010,7 +1012,10 @@ async def enable(self):
if self.want_enable or self.enabled:
logger.tdebug("cluster.debug.cluster.enable_again")
return
timeoutTimer = None
async def _(err, ack):
if timeoutTimer is not None:
timeoutTimer.block()
self.want_enable = False
if err:
logger.terror(
Expand All @@ -1028,6 +1033,9 @@ async def _(err, ack):
)
await dashboard.set_status("cluster.enabled" + (".trusted" if self.trusted else ""))
await self.keepalive()
async def _timeout():
self.want_enable = False
await self.retry()
self.want_enable = True
self.keepalive_failed = 0
await self.channel_lock.wait()
Expand Down Expand Up @@ -1062,6 +1070,7 @@ async def _(err, ack):
},
callback=_
)
timeoutTimer = Timer.delay(_timeout, delay=120)
async def keepalive(self):
def _clear():
if self.keepalive_timer is not None:
Expand Down
8 changes: 8 additions & 0 deletions core/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,15 @@ async def process(type: str, data: Any):
)
)
return data
if type == "global_stats":
return {
"ip": {

},
"ua": {

}
}

async def get_cache_stats() -> StatsCache:
stat = StatsCache()
Expand Down
13 changes: 9 additions & 4 deletions core/stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,6 @@ def read_storage():
with open("./cache/storage.bin", "rb") as r:
f = FileDataInputStream(r)
last_hour = f.readVarInt()
GlobalStats.from_binary(f.read(f.readVarInt()))
for _ in range(f.readVarInt()):
storage = StorageStats(f.readString())
(
Expand All @@ -244,15 +243,18 @@ def read_storage():
)

storages[storage.get_name()] = storage
try:
blength = f.readVarInt()
bdata = f.read(blength)
except:
return
GlobalStats.from_binary(bdata)


def write_storage():
global storages, globalStats
f = DataOutputStream()
f.writeVarInt(last_hour)
data = globalStats.get_binary()
f.writeVarInt(len(data))
f.write(data)
f.writeVarInt(len(storages))
for storage in storages.values():
f.writeString(storage.get_name())
Expand All @@ -266,6 +268,9 @@ def write_storage():
storage._last_bytes,
):
f.writeVarInt(field)
data = globalStats.get_binary()
f.writeVarInt(len(data))
f.write(data)
with open("./cache/storage.bin", "wb") as w:
w.write(f.io.getbuffer())

Expand Down
8 changes: 4 additions & 4 deletions core/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -825,7 +825,7 @@ async def __call__(
r"bytes=(\d+)-", range_str, re.S
)
end_bytes = length - 1
if range_match:
if range_match and self.status_code == 200:
start_bytes = int(range_match.group(1)) if range_match else 0
if range_match.lastindex == 2:
end_bytes = int(range_match.group(2))
Expand All @@ -838,7 +838,7 @@ async def __call__(
)
self.set_headers(
{
"Accept-ranges": "bytes",
"Accept-Ranges": "bytes",
"Content-Range": f"bytes {start_bytes}-{end_bytes}/{length}",
}
)
Expand All @@ -848,7 +848,7 @@ async def __call__(
self.content_type = self.content_type or self._get_content_type(content)
compression: Compressor = compressor(
await request.get_headers("Accept-Encoding", ""),
content.getbuffer()[start_bytes:length],
content.getbuffer()[start_bytes:end_bytes + 1],
)
if compression.type is None:
content = compression.data
Expand Down Expand Up @@ -896,7 +896,7 @@ async def __call__(
client.write(content.data)
await client.writer.drain()
elif isinstance(content, io.BytesIO):
client.write(content.getbuffer()[start_bytes:length])
client.write(content.getbuffer()[start_bytes:end_bytes + 1])
await client.writer.drain()
elif isinstance(content, Path):
async with aiofiles.open(content, "rb") as r:
Expand Down
7 changes: 3 additions & 4 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,9 @@
)

if __name__ == "__main__":
if sys.version_info <= (3, 8):
print(f"Not support version: {sys.version}")
if sys.version_info == (3, 9):
print(f"Warning version: {sys.version}")
if sys.version_info <= (3, 9):
print(f"Not support version: {sys.version}. Please update python to 3.10+")
exit(-1)
import core

core.init()

0 comments on commit 561dd9e

Please sign in to comment.