Sharing a JMS traffic statistics dashboard

Posted on 2025-3-14 16:25:13
Last edited by 雪秋千 on 2025-3-14 18:05

Does anyone have a traffic statistics dashboard built on the justmysocks API? The one I wrote myself with GPT is too ugly.
Below is a preview of my own version:
[Attachment: Screenshot 2025-03-14 180331.png (65.11 KB)]

OP | Posted on 2025-3-14 18:02:58
Let me post mine first.
1. First, the local data-collection script. It is meant to run every 30 seconds, since JMS refreshes its counter roughly every half minute. (A minimal scheduling sketch follows the script below.)
import requests
import json
from datetime import datetime

# NOTE: the query string is account-specific; fill in your own service parameters here.
API_URL = "https://justmysocks6.net/members/getbwcounter.php?service="
HISTORY_FILE = "扶Q_usage_history.json"


def fetch_bw_counter():
    """Query the JMS bandwidth counter API and return the bytes used so far."""
    response = requests.get(API_URL)
    data = response.json()
    # data = {"monthly_bw_limit_b": 1000000000000,
    #         "bw_counter_b": 651529129,
    #         "bw_reset_day_of_month": 13}
    return data["bw_counter_b"]


def load_history():
    """Return the recorded samples, or an empty list if the file is missing or unreadable."""
    try:
        with open(HISTORY_FILE, "r") as f:
            return json.load(f)
    except (IOError, ValueError):
        return []


def save_history(history):
    with open(HISTORY_FILE, "w") as f:
        json.dump(history, f, indent=2)


def record_usage():
    # 1) Fetch the current usage
    current_bw = fetch_bw_counter()
    timestamp = datetime.utcnow().isoformat()

    # 2) Append the new record to the history file
    history = load_history()
    history.append({
        "timestamp": timestamp,
        "bw_counter_b": current_bw
    })
    save_history(history)


if __name__ == "__main__":
    record_usage()
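
Since ordinary cron only schedules down to one-minute granularity, a small wrapper loop is one simple way to get a 30-second cadence. This is just a minimal sketch under that assumption: the module name jms_collector and the run_forever() helper are illustrative names of my own, not part of the script above; a systemd timer or any other supervisor would work just as well.

import time

# Hypothetical module name: assumes the collector script above was saved as jms_collector.py
from jms_collector import record_usage

POLL_INTERVAL_SECONDS = 30  # JMS seems to refresh its counter roughly every half minute


def run_forever():
    """Call record_usage() every POLL_INTERVAL_SECONDS, surviving transient errors."""
    while True:
        try:
            record_usage()
        except Exception as exc:  # e.g. a network hiccup while calling the JMS API
            print(f"collection failed: {exc}")
        time.sleep(POLL_INTERVAL_SECONDS)


if __name__ == "__main__":
    run_forever()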


2. Second, the display dashboard. The first chart shows cumulative usage, and the second shows the usage rate over different time windows.
import json
import pandas as pd
from datetime import timedelta
import dash
from dash import dcc, html, Input, Output
import plotly.graph_objs as go

# ----- Utility Functions -----

def load_usage_data(file_path="扶Q_usage_history.json"):
    """Load usage data from JSON, localize to America/Los_Angeles, then convert to UTC.
    Plotly will automatically render these timestamps in the visitor's local time.
    """
    with open(file_path, "r") as f:
        data = json.load(f)
    df = pd.DataFrame(data)
    # Assume timestamps in the file are in California time.
    # NOTE: the collector script above actually stores datetime.utcnow() values,
    # so if you use it unchanged, localize to 'UTC' here instead.
    df['timestamp'] = pd.to_datetime(df['timestamp']).dt.tz_localize('America/Los_Angeles')
    # Convert to UTC for consistent plotting.
    df['timestamp'] = df['timestamp'].dt.tz_convert('UTC')
    df.sort_values('timestamp', inplace=True)
    return df


def convert_bytes(value_bytes):
    """
    Convert a byte value to a human-friendly string using a 1000 conversion factor.
    If the value in GB is less than 0.001, display in MB.
    If the value in MB is less than 0.001, display in B.
    """
    value_gb = value_bytes / 1e9
    if value_gb >= 0.001:
        return f"{value_gb:.3f} GB"
    value_mb = value_bytes / 1e6
    if value_mb >= 0.001:
        return f"{value_mb:.3f} MB"
    return f"{value_bytes} B"


def aggregate_data(df, resolution, window):
    """
    Aggregate usage data for a given resolution and time window.
    resolution: a pandas offset alias, e.g., 'T' for minute, 'H' for hour, 'D' for day, 'W' for week.
    window: timedelta object representing the lookback period.
    """
    end_time = df['timestamp'].max()
    start_time = end_time - window
    df_window = df[df['timestamp'] >= start_time].copy()
    if df_window.empty:
        return pd.DataFrame(columns=['timestamp', 'bw_counter_b'])
    df_window.set_index('timestamp', inplace=True)
    df_resampled = df_window.resample(resolution).last().dropna()
    df_resampled.reset_index(inplace=True)
    return df_resampled


def compute_usage_rates(df):
    """
    Compute the incremental usage (difference between consecutive bw_counter_b values)
    and time differences. Returns a copy of the DataFrame with new columns
    'usage_diff', 'time_diff_sec' and 'usage_rate'.
    """
    df = df.copy()
    df['usage_diff'] = df['bw_counter_b'].diff()
    df['time_diff_sec'] = df['timestamp'].diff().dt.total_seconds()
    df['usage_rate'] = df['usage_diff'] / df['time_diff_sec']
    return df


# ----- Dash App Setup -----

app = dash.Dash(__name__)
server = app.server

app.layout = html.Div([
    html.H1("扶Q Data Usage Dashboard"),
    html.Div([
        html.Button("Minutes", id="btn-minutes", n_clicks=0),
        html.Button("Hourly", id="btn-hourly", n_clicks=0),
        html.Button("Daily", id="btn-daily", n_clicks=0),
        html.Button("Weekly", id="btn-weekly", n_clicks=0)
    ], style={'marginBottom': '20px'}),
    html.Div(id="summary-stats", style={'marginBottom': '20px'}),
    dcc.Graph(id="usage-graph"),
    dcc.Graph(id="rate-graph"),
    dcc.Interval(id="interval-update", interval=60*1000, n_intervals=0)  # update every minute
])

# ----- Callback to Update Graphs and Stats -----

@app.callback(
    [Output("usage-graph", "figure"),
     Output("rate-graph", "figure"),
     Output("summary-stats", "children")],
    [Input("btn-minutes", "n_clicks"),
     Input("btn-hourly", "n_clicks"),
     Input("btn-daily", "n_clicks"),
     Input("btn-weekly", "n_clicks"),
     Input("interval-update", "n_intervals")]
)
def update_dashboard(n_min, n_hour, n_day, n_week, n_interval):
    df = load_usage_data()

    # Determine which button was most recently pressed
    ctx = dash.callback_context
    if not ctx.triggered:
        resolution_choice = 'H'
        window = timedelta(hours=24)
    else:
        button_id = ctx.triggered[0]['prop_id'].split('.')[0]
        if button_id == "btn-minutes":
            resolution_choice = 'T'  # minute resolution
            window = timedelta(hours=1)
        elif button_id == "btn-hourly":
            resolution_choice = 'H'
            window = timedelta(hours=24)
        elif button_id == "btn-daily":
            resolution_choice = 'D'
            window = timedelta(days=7)
        elif button_id == "btn-weekly":
            resolution_choice = 'W'
            window = timedelta(weeks=4)
        else:
            resolution_choice = 'H'
            window = timedelta(hours=24)

    df_agg = aggregate_data(df, resolution_choice, window)
    df_rate = compute_usage_rates(df_agg)

    # ----- Cumulative Usage Figure -----
    cum_fig = go.Figure()
    cum_fig.add_trace(go.Scatter(
        x=df_agg['timestamp'],
        y=df_agg['bw_counter_b'] / 1e9,  # cumulative usage in GB
        mode='lines+markers',
        name="Cumulative Usage (GB)",
        connectgaps=False
    ))
    cum_fig.update_layout(
        title="扶Q Cumulative Usage Over Time",
        xaxis_title="Time",
        yaxis_title="Usage (GB)",
        hovermode="x unified"
    )

    # ----- Usage Rate Figure -----
    df_rate_clean = df_rate.dropna(subset=['usage_diff'])
    if not df_rate_clean.empty:
        max_diff = df_rate_clean['usage_diff'].max()
        if max_diff / 1e9 >= 0.001:
            factor = 1e9
            y_label = "Usage per Interval (GB)"
        elif max_diff / 1e6 >= 0.001:
            factor = 1e6
            y_label = "Usage per Interval (MB)"
        else:
            factor = 1
            y_label = "Usage per Interval (B)"
        usage_diff_converted = df_rate_clean['usage_diff'] / factor
    else:
        usage_diff_converted = []
        y_label = "Usage per Interval"

    rate_fig = go.Figure()
    rate_fig.add_trace(go.Scatter(
        x=df_rate_clean['timestamp'],
        y=usage_diff_converted,
        mode='lines+markers',
        name="Interval Usage",
        connectgaps=False
    ))
    rate_fig.update_layout(
        title="扶Q Usage Rate Over Time",
        xaxis_title="Time",
        yaxis_title=y_label,
        hovermode="x unified"
    )

    # ----- Summary Statistics -----
    if not df_rate['usage_rate'].dropna().empty:
        avg_rate = df_rate['usage_rate'].dropna().mean()  # bytes per second
        avg_per_min = convert_bytes(avg_rate * 60)
        avg_per_hour = convert_bytes(avg_rate * 3600)
        avg_per_day = convert_bytes(avg_rate * 3600 * 24)
        avg_per_week = convert_bytes(avg_rate * 3600 * 24 * 7)
    else:
        avg_per_min = avg_per_hour = avg_per_day = avg_per_week = "N/A"

    summary = html.Div([
        html.P(f"Average Usage per Minute: {avg_per_min}"),
        html.P(f"Average Usage per Hour: {avg_per_hour}"),
        html.P(f"Average Usage per Day: {avg_per_day}"),
        html.P(f"Average Usage per Week: {avg_per_week}")
    ])

    return cum_fig, rate_fig, summary


if __name__ == '__main__':
    # On newer Dash releases run_server() is deprecated/removed; app.run(debug=True) is the replacement.
    app.run_server(debug=True)
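
To tie the two pieces together (this is just my suggested workflow, not something the scripts enforce): keep the collector running on its 30-second schedule so 扶Q_usage_history.json accumulates samples, then start the dashboard script with Python; Dash's development server listens on http://127.0.0.1:8050 by default, so open that address in a browser. The buttons switch between minute, hourly, daily and weekly views, and the page refreshes itself every minute via the dcc.Interval component.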