Nginx Httpcode分析脚本

运维 系统运维
本文中通过每5分钟对Nginx日志进行切割,然后用Python程序计算http code的分布,用Zabbix来实现单台机器Nginx qos的监控,配合对Zabbix数据库的Lastvalue进行聚合,可以进行详细的各个维度的分析。具体Nginx日志使用的分析脚本如正文所示。

之前在做CDN运维的时候,因为业务的特殊性(跨机房,跨ISP,跨区域),把日志集中传输到一个中心来做qos的分析不太现实,因此采用的方法是每5分钟对Nginx日志进行切割,然后通过Python程序计算http code的分布,并通过Zabbix来实现单台机器Nginx qos的监控,配合对Zabbix数据库的Lastvalue进行聚合,则可以监控整个CDN的流量,qos数据等,这样一般发现问题的延迟就在5分钟左右(cdn的qos敏感性不是很强),配合rsync+hadoop+hive来计算nginx的日志,也可以得到更加详细的各个维度的分析(离线数据分析),下面贴下Nginx日志使用的分析脚本:

[[110138]]

先贴下zabbix聚合脚本:

  1. #!/usr/bin/python 
  2. #to get webcdn totaol statistics 
  3. # -*- coding: utf8 -*- 
  4. import MySQLdb 
  5. import sys 
  6. import os 
  7. def get_total_value(sql): 
  8.     db = MySQLdb.connect(host='xxxx',user='xxxx',passwd='xxxx',db='xxxx'
  9.     cursor = db.cursor() 
  10.     cursor.execute(sql) 
  11.     try:   
  12.         result = cursor.fetchone()[0] 
  13.     except: 
  14.         result = 0 
  15.     cursor.close() 
  16.     db.close() 
  17.     return result 
  18. if __name__ == '__main__': 
  19.     sql = '' 
  20.     if sys.argv[1] == "network_traffic": 
  21.         sql = "select round(sum(lastvalue)/(1024*1024),4) from  hosts a, items b   where key_ in ( 'net.if.out[eth1,bytes]','net.if.out[eth0,bytes]') and lower(host) like '%-cdn-cache%'  and a.hostid = b.hostid" 
  22.     elif sys.argv[1] == "nginx_traffic": 
  23.         sql = "select sum(lastvalue) from  hosts a, items b   where key_ = 'log_webcdn_getvalue[traffic]'   and lower(host) like '%cdn-cache%'  and a.hostid = b.hostid" 
  24.     elif sys.argv[1] == "2xxand3xx": 
  25.         sql = "select sum(lastvalue) from  hosts a, items b   where key_ in ( 'log_webcdn_getvalue[200]','log_webcdn_getvalue[300]') and lower(host) like '%-cdn-cache%'  and a.hostid = b.hostid" 
  26.     elif sys.argv[1] == "4xxand5xx": 
  27.         sql = "select sum(lastvalue) from  hosts a, items b   where key_ in ( 'log_webcdn_getvalue[four]','log_webcdn_getvalue[five]') and lower(host) like '%-cdn-cache%'  and a.hostid = b.hostid" 
  28.     elif sys.argv[1] == "network_ss": 
  29.         sql = "select sum(lastvalue) from  hosts a, items b   where key_ = 'network_conn' and lower(host) like '%-cdn-cache%'  and a.hostid = b.hostid" 
  30.     else: 
  31.         sys.exit(0) 
  32. #    print sql 
  33.     value = get_total_value(sql) 
  34.     print value 

然后是单台的分析脚本:

  1. #!/usr/bin/python 
  2. #coding=utf-8 
  3. from __future__ import division 
  4. import subprocess, signal,string 
  5. import codecs 
  6. import re 
  7. import os 
  8. import time, datetime 
  9. import sys 
  10. def show_usage(): 
  11.     print """ 
  12.         python nginx_log_wedcdn.py result_key 
  13.         result_key could be: 
  14.         average_bodysize, response_time, sum_count, count_success, four, 403, 404, 499, five, 500, 502, 503, 200, 300, requests_second 
  15.         response_time_source, percentage_time_1, percentage_time_3, all 
  16.           """ 
  17. def runCmd(command, timeout = 10): 
  18.     start = datetime.datetime.now() 
  19.     process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
  20.     while process.poll() is None: 
  21.         time.sleep(0.2) 
  22.         now = datetime.datetime.now() 
  23.         if (now - start).seconds > timeout: 
  24.             os.kill(process.pid, signal.SIGKILL) 
  25.             os.waitpid(-1, os.WNOHANG) 
  26.             return None 
  27.     return process.stdout.readlines() 
  28. def get_old_filename(): 
  29.     t = datetime.datetime.now() + datetime.timedelta(minutes = -5) 
  30.     a = t.strftime('%Y-%m-%d-%H') 
  31.     b = t.strftime('%M') 
  32.     b = int(b)//5*5 
  33.     if b < 10: 
  34.         c = "0" + str(b) 
  35.     else: 
  36.         c = str(b) 
  37.     d = "/log/nginx/old/" + a + "-%s.log.gz" % c 
  38.     #print d 
  39.     return d 
  40. def get_new_filename(): 
  41.     t = datetime.datetime.now() + datetime.timedelta(minutes = -5) 
  42.     a = t.strftime('%Y-%m-%d-%H') 
  43.     b = t.strftime('%M') 
  44.     b = int(b)//5*5 
  45.     if b < 10: 
  46.         c = "0" + str(b) 
  47.     else: 
  48.         c = str(b) 
  49.     d = "/log/nginx/old/" + a + "-%s.log" % c 
  50.     #print d 
  51.     return d 
  52. def get_new2_filename(): 
  53.     t = datetime.datetime.now() + datetime.timedelta(minutes = -5) 
  54.     a = t.strftime('%Y-%m-%d-%H') 
  55.     b = t.strftime('%M') 
  56.     b = int(b)//5*5 
  57.     if b < 10: 
  58.         c = "0" + str(b) 
  59.     else: 
  60.         c = str(b) 
  61.     d = "/log/nginx/new/" + a + "-%s.log" % c 
  62.     #print d 
  63.     return d 
  64. def average_flow(): 
  65.     flow = 0 
  66.     flow1 = 0 
  67.     flow_ppsucai = 0 
  68.     flow_asimgs = 0 
  69.     flow_static9 = 0 
  70.     traffic = 0.0 
  71.     traffic1 = 0.0 
  72.     count = 0 
  73.     count_sucai = 0 
  74.     count_sucai_100 = 0 
  75.     count_sucai_30_100 = 0 
  76.     count_sucai_30 = 0 
  77.     count_asimgs = 0 
  78.     count_asimgs_100 = 0 
  79.     count_asimgs_30_100 = 0 
  80.     count_asimgs_30 = 0 
  81.     count_static9 = 0 
  82.     count_static9_100 = 0 
  83.     count_static9_30_100 = 0 
  84.     count_static9_30 = 0 
  85.     sum_time = 0.0 
  86.     sum_ppsucai_time = 0.0 
  87.     sum_asimgs_time = 0.0 
  88.     sum_static9_time = 0.0 
  89.     sum_time_source = 0.0 
  90.     count_200 = 0 
  91.     count_300 = 0 
  92.     count_success = 0 
  93.     count_200_backup = 0 
  94.     count_not_200_backup = 0 
  95.     id_list_200 = [200,206] 
  96.     id_list_300 = [300,301,302,303,304,305,306,307] 
  97.     id_list_success = [200,206,300,301,302,303,304,305,306,307] 
  98.     data_byte = 0 
  99.     elapsed = 0.0 
  100.     response_time = 0.0 
  101.     response_time_source = 0.0 
  102.     requests_second = 0.0 
  103.     requests_second_sucai = 0.0 
  104.     requests_second_asimgs = 0.0 
  105.     list_time_1 = [] 
  106.     list_time_3 = [] 
  107.     list_ip_403 = [] 
  108.     list_ip_404 = [] 
  109.     list_ip_415 = [] 
  110.     list_ip_499 = [] 
  111.     list_ip_500 = [] 
  112.     list_ip_502 = [] 
  113.     list_ip_503 = [] 
  114.     server_list = ['"127.0.0.1:8080"','"127.0.0.1:8081"','"-"'] 
  115.     file_name = get_old_filename() 
  116.     if os.path.isfile("%s" % file_name): 
  117.         Writelog(file_name) 
  118.         i = os.popen("/bin/zcat %s" % file_name).readlines() 
  119.         #i = gzip.GzipFile("%s" % file_name).readlines() 
  120.     else: 
  121.         file_name = get_new_filename() 
  122.         if os.path.isfile("%s" % file_name): 
  123.             Writelog(file_name) 
  124.             i = os.popen("/bin/cat %s" % file_name).readlines() 
  125.         else: 
  126.             #time.sleep(15) 
  127.             file_name = get_new2_filename() 
  128.             if os.path.isfile("%s" % file_name): 
  129.                 Writelog(file_name) 
  130.                 i = os.popen("/bin/cat %s" % file_name).readlines() 
  131.             else: 
  132.                 os.popen("rm -f /tmp/exist.txt") 
  133.                 sys.exit(1) 
  134.     for line in i: 
  135.             count += 1 
  136.             try: 
  137.                 domain_name = line.split()[1] 
  138.             except: 
  139.                 pass   
  140.             try: 
  141.                 web_code = int(line.split()[8]) 
  142.             except: 
  143.                 web_code = 888 
  144.             try: 
  145.                 IP = str(line.split()[0]) 
  146.             except: 
  147.                 pass 
  148.             try:   
  149.                 data_byte = int(line.split()[9]) 
  150.                 #print "data", data_byte 
  151.             except: 
  152.                 data_byte = 0.0001 
  153.             try: 
  154.                 elapsed = float(line.split()[-1].strip('"')) 
  155.                 if elapsed == 0.000: 
  156.                     elapsed = 0.0001 
  157.             except: 
  158.                 elapsed = 0.0001 
  159.             try:   
  160.                 time_source = float(line.split()[-4].strip('"')) 
  161.             except: 
  162.                 time_source = 0.0 
  163.             try: 
  164.                 backup_server =  str(line.split()[-3]) 
  165.             except: 
  166.                 pass 
  167.             flow1 += data_byte 
  168.             if web_code in id_list_success: 
  169.                 flow += data_byte 
  170.                 sum_time_source += time_source 
  171.                 if domain_name != "ppsucai.pptv.com": 
  172.                     sum_time += elapsed 
  173.                 else: 
  174.                     #print domain_name 
  175.                     sum_time += 0.000 
  176.             if web_code in id_list_200: 
  177.                 #print web_code 
  178.                 count_200 += 1 
  179.                 if backup_server not in server_list: 
  180.                     #print web_code, backup_server 
  181.                     count_200_backup += 1 
  182.             elif web_code == 200 and date_byte == 0: 
  183.                 #print line.split()[3].lstrip("[") 
  184.                 WriteURLInfo(line.split()[3].lstrip("[")) 
  185.                 WriteURLInfo("\t") 
  186.                 WriteURLInfo(line.split()[10]) 
  187.                 WriteURLInfo("\n") 
  188.             elif web_code in id_list_300: 
  189.                 count_300 += 1 
  190.             elif web_code == 403 and IP not in list_ip_403: 
  191.                 list_ip_403.append(IP) 
  192.                 #print "this is the sum 403 count:", IP, len(list_ip_403) 
  193.             elif web_code == 404 and IP not in list_ip_404: 
  194.                 list_ip_404.append(IP) 
  195.                 #print "this is the sum 404 count:", IP, len(list_ip_404) 
  196.             elif web_code == 415 and IP not in list_ip_415: 
  197.                 list_ip_415.append(IP) 
  198.                 #print "this is the sum 415 count:", IP, len(list_ip_415) 
  199.             elif web_code == 499 and IP not in list_ip_499: 
  200.                 list_ip_499.append(IP) 
  201.                 #print "this is the sum 499 count:", IP, len(list_ip_499) 
  202.             elif web_code == 500 and IP not in list_ip_500: 
  203.                 list_ip_500.append(IP) 
  204.                 #print "this is the sum 500 count:", IP, len(list_ip_500) 
  205.             elif web_code == 502 and IP not in list_ip_502: 
  206.                 list_ip_502.append(IP) 
  207.                 #print "this is the sum 502 count:", IP, len(list_ip_502) 
  208.             elif web_code == 503 and IP not in list_ip_503: 
  209.                 list_ip_503.append(IP) 
  210.                 #print "this is the sum 503 count:", IP, len(list_ip_503) 
  211.             if web_code not in id_list_200 and backup_server not in server_list: 
  212.                 #print web_code, backup_server 
  213.                 count_not_200_backup += 1 
  214.                                                                                           
  215.             if elapsed > 1.0 and web_code in id_list_success and IP not in list_time_1: 
  216.                 list_time_1.append(IP) 
  217.             elif elapsed > 3.0 and web_code in id_list_success and IP not in list_time_3: 
  218.                 list_time_3.append(IP) 
  219.                                                                                       
  220.             if domain_name == "ppsucai.pptv.com" and web_code in id_list_success: 
  221.                 download_speed_sucai = round(data_byte / elapsed / 1024, 2) 
  222.                 flow_ppsucai += data_byte 
  223.                 sum_ppsucai_time += elapsed 
  224.                 count_sucai += 1 
  225.                 if download_speed_sucai >= 100: 
  226.                     count_sucai_100 += 1 
  227.                 elif download_speed_sucai < 100 and download_speed_sucai >= 30: 
  228.                     count_sucai_30_100 += 1 
  229.                 else: 
  230.                     count_sucai_30 += 1 
  231.             elif domain_name == "asimgs.pplive.cn" and web_code in id_list_success: 
  232.                 download_speed_asimgs = round(data_byte / elapsed / 1024, 2) 
  233.                 flow_asimgs += data_byte 
  234.                 sum_asimgs_time += elapsed 
  235.                 count_asimgs += 1 
  236.                 if download_speed_asimgs >= 100: 
  237.                     count_asimgs_100 += 1 
  238.                 elif download_speed_asimgs < 100 and download_speed_asimgs >= 30: 
  239.                     count_asimgs_30_100 += 1 
  240.                 else: 
  241.                     count_asimgs_30 += 1 
  242.             elif domain_name == "static9.pplive.cn" and web_code in id_list_success: 
  243.                 download_speed_static9 = round(data_byte / elapsed / 1024, 2) 
  244.                 flow_static9 += data_byte 
  245.                 sum_static9_time += elapsed 
  246.                 count_static9 += 1 
  247.                 if download_speed_static9 >= 100: 
  248.                     count_static9_100 += 1 
  249.                 elif download_speed_static9 < 100 and download_speed_static9 >= 30: 
  250.                     count_static9_30_100 += 1 
  251.                 else: 
  252.                     count_static9_30 += 1 
  253.         #else: 
  254.             #break 
  255.     try: 
  256.         traffic = round((flow*1.07*8)/300/1024/1024, 2) 
  257.         #traffic1 = round((flow1*1.07)/300/1024/1024, 2) 
  258.         #print traffic, traffic1 
  259.         #traffic1 = round(flow/sum_time/1024/1024, 2) 
  260.         count_success = count_200 + count_300 
  261.         response_time = round(sum_time/count_success, 2) 
  262.         response_time_source = round(sum_time_source/count_success, 2) 
  263.         requests_second = round(count_success/300, 2) 
  264.         if sum_ppsucai_time == 0.0: 
  265.             sum_ppsucai_time = 0.0001 
  266.         if sum_asimgs_time == 0.0: 
  267.             sum_asimgs_time = 0.0001 
  268.         #print sum_static9_time 
  269.         if sum_static9_time == 0.0: 
  270.             sum_static9_time = 0.0001 
  271.         traffic_ppsucai = round(flow_ppsucai/sum_ppsucai_time/1024, 2) 
  272.         traffic_asimgs = round(flow_asimgs/sum_asimgs_time/1024, 2) 
  273.         traffic_static9 = round(flow_static9/sum_static9_time/1024, 2) 
  274.         #print "flow_static:", flow_static9, "traffic_static9", traffic_static9 
  275.         average_bodysize = round((flow/count_success)/1024, 2) 
  276.         percentage_time_1 = round(len(list_time_1)/count_success*100, 2) 
  277.         percentage_time_3 = round(len(list_time_3)/count_success*100, 2) 
  278.         if count_sucai == 0: 
  279.             count_sucai = 0.0001 
  280.         percentage_sucai_100 = round(count_sucai_100/count_sucai*100, 2) 
  281.         percentage_sucai_30_100 = round(count_sucai_30_100/count_sucai*100, 2) 
  282.         percentage_sucai_30 = round(count_sucai_30/count_sucai*100, 2) 
  283.         if count_asimgs == 0: 
  284.             count_asimgs = 0.0001 
  285.         percentage_asimgs_100 = round(count_asimgs_100/count_asimgs*100, 2) 
  286.         percentage_asimgs_30_100 = round(count_asimgs_30_100/count_asimgs*100, 2) 
  287.         percentage_asimgs_30 = round(count_asimgs_30/count_asimgs*100, 2) 
  288.         #print count_static9 
  289.         if count_static9 == 0: 
  290.             count_static9 = 0.0001 
  291.         percentage_static9_100 = round(count_static9_100/count_static9*100, 2) 
  292.         #print count_static9_100, "100", percentage_static9_100 
  293.         percentage_static9_30_100 = round(count_static9_30_100/count_static9*100, 2) 
  294.         #print count_static9_30_100, "30-100", percentage_static9_30_100 
  295.         percentage_static9_30 = round(count_static9_30/count_static9*100, 2) 
  296.         #print count_static9_30, "30", percentage_static9_30 
  297.         requests_second_sucai = round(count_sucai/300, 2) 
  298.         requests_second_asimgs = round(count_asimgs/300, 2) 
  299.         requests_second_static9 = round(count_static9/300, 2) 
  300.         #print requests_second_static9 
  301.         #print count, "this is the count of 2xx_backup:", count_200_backup,"%", round(count_200_backup/count, 4),"this is the count of !2xx_backup:", count_not_200_backup, round(count_not_200_backup/count, 4) 
  302.         percentage_200_backup = round(count_200_backup/count*100, 2) 
  303.         percentage_not_200_backup = round(count_not_200_backup/count*100, 2) 
  304.         return average_bodysize, response_time, count, count_success, len(list_ip_403), len(list_ip_404), len(list_ip_499), len(list_ip_500), len(list_ip_502), len(list_ip_503), count_200, count_300, requests_second, response_time_source, len(list_time_1), len(list_time_3), percentage_time_1, percentage_time_3,count_sucai,percentage_sucai_100, percentage_sucai_30_100, percentage_sucai_30, requests_second_sucai, count_asimgs, percentage_asimgs_100, percentage_asimgs_30_100, percentage_asimgs_30, requests_second_asimgs, traffic_ppsucai, traffic_asimgs, traffic, traffic_static9, count_static9, percentage_static9_100, percentage_static9_30_100, percentage_static9_30, requests_second_static9, percentage_200_backup, percentage_not_200_backup, len(list_ip_415) 
  305.     except: 
  306.         return 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 
  307. def log_files(pwd): 
  308.         log_file_list = [] 
  309.         files = os.popen("ls %s" % pwd).readlines() 
  310.         for x in files: 
  311.                 if x.strip().endswith("log"): 
  312.                         log_file_list.append(x.strip()) 
  313.         return log_file_list 
  314. def result_dic(): 
  315.     list = average_flow() 
  316.     #print list 
  317. #   print list 
  318.     result = {} 
  319.     result['average_bodysize'] = list[0] 
  320.     result['response_time'] = list[1] 
  321.     result['sum_count'] = list[2] 
  322.     result['count_success'] = list[3] 
  323.     result['four'] = list[4] + list[5] + list[6] + list[39] 
  324. #   print 'four','=','%s' % list[4],'+','%s' % list[5],'+','%s' % list[6],'+','%s' % list[39], result['four'] 
  325.     result['403'] = list[4] 
  326. #   print '403', result['403'] 
  327.     result['404'] = list[5] 
  328. #   print '404', result['404'] 
  329.     result['499'] = list[6] 
  330. #   print '499', result['499'] 
  331.     result['415'] = list[39] 
  332. #   print '415', result['415'] 
  333.     result['five'] = list[7] + list[8] + list[9] 
  334.     result['500'] = list[7] 
  335.     result['502'] = list[8] 
  336.     result['503'] = list[9] 
  337.     result['200'] = list[10] 
  338.     result['300'] = list[11] 
  339.     result['requests_second'] = list[12] 
  340.     result['response_time_source'] = list[13] 
  341.     result['percentage_time_1'] = list[16] 
  342.     result['percentage_time_3'] = list[17] 
  343.     result['count_sucai'] = list[18] 
  344.     result['percentage_sucai_100'] = list[19] 
  345.     result['percentage_sucai_30_100'] = list[20] 
  346.     result['percentage_sucai_30'] = list[21] 
  347.     result['requests_second_sucai'] = list[22] 
  348.     result['count_asimgs'] = list[23] 
  349.     result['percentage_asimgs_100'] = list[24] 
  350.     result['percentage_asimgs_30_100'] = list[25] 
  351.     result['percentage_asimgs_30'] = list[26] 
  352.     result['requests_second_asimgs'] = list[27] 
  353.     result['traffic_ppsucai'] = list[28] 
  354.     result['traffic_asimgs'] = list[29] 
  355.     result['traffic'] = list[30] 
  356.     result['traffic_static9'] = list[31] 
  357.     result['count_static9'] = list[32] 
  358.     result['percentage_static9_100'] = list[33] 
  359.     result['percentage_static9_30_100'] = list[34] 
  360.     result['percentage_static9_30'] = list[35] 
  361.     result['requests_second_static9'] = list[36] 
  362.     result['percentage_200_backup'] = list[37] 
  363.     result['percentage_not_200_backup'] = list[38] 
  364.     result['all'] = list 
  365.     return result 
  366. def Writelog(msg): 
  367.     o = open("/log/nginx/qos_result_new"+".log","aw") 
  368.     o.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + ":" + msg + "\n") 
  369.     o.close() 
  370. def WriteTmpInfo(msg): 
  371.     o = open("/tmp/webcdnqos_result"+".txt","aw+") 
  372.     o.write(msg+"\n") 
  373.     o.close() 
  374. def WriteURLInfo(msg): 
  375.     today = datetime.date.today() 
  376.     o = open("/tmp/webcdnqos_url_%s" % today.strftime('%Y-%m-%d') + ".log","aw") 
  377. #   o.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + "    " +msg+"\n") 
  378.     o.write(msg) 
  379.     o.close() 
  380. if __name__ == "__main__": 
  381.     if len(sys.argv) < 2: 
  382.         show_usage() 
  383.         os.popen("rm -f /tmp/exist.txt") 
  384.             sys.exit(1) 
  385.     else: 
  386.         if os.path.isfile("/tmp/exist.txt"): 
  387.             sys.exit(1) 
  388.         else: 
  389.             os.popen("echo 'hello' > /tmp/exist.txt") 
  390.             result_key = sys.argv[1] 
  391.             status = result_dic() 
  392.             os.popen(">/tmp/webcdnqos_result.txt") 
  393.             print status[result_key] 
  394.             Writelog(str(status[result_key])) 
  395.             for i in status.keys(): 
  396.                 WriteTmpInfo(str(i)+"="+str(status[i])) 
  397.             os.popen("rm -f /tmp/exist.txt") 
责任编辑:黄丹 来源: 51CTO博客
相关推荐

2012-09-20 10:07:29

Nginx源码分析Web服务器

2011-08-16 09:34:34

Nginx

2010-09-06 14:14:32

ppp-on

2013-08-26 13:41:18

Nginx

2016-09-22 12:25:42

2010-03-25 18:31:03

Nginx配置文件

2013-10-31 11:08:15

2013-10-31 16:17:45

日志分析Awstats实战Nginx

2010-03-30 09:38:58

2017-11-30 18:42:22

PythonCPU脚本分析

2020-04-09 13:23:29

Nginx配置文件模板

2020-04-27 07:13:37

Nginx底层进程

2018-01-16 10:11:11

Nginx访问日志

2011-12-02 22:51:46

Nginx负载均衡

2013-12-17 09:42:53

Nginx性能分析http服务器

2023-10-24 07:22:22

Nginx运维管理

2020-02-21 10:45:06

运维架构技术

2023-10-27 08:52:03

Python脚本关系

2010-05-24 13:36:11

2023-10-30 09:01:08

Nginx日志分析
点赞
收藏

51CTO技术栈公众号