diff --git a/cronjob/media/vd_compress.py b/cronjob/media/vd_compress.py
index 1cdc6a7..f1c86b2 100644
--- a/cronjob/media/vd_compress.py
+++ b/cronjob/media/vd_compress.py
@@ -1,8 +1,15 @@
+import time
+
 from lib.all import *
 import ffmpy
 import cv2
 import oss2
+service_name = 'vd_compress'
+# log_dir = '/app/log'
+log_dir = '/Users/erwin/data/log'
+logger = Logger(service_name, log_dir=log_dir)
+
 # input_file = "h264_origin.mp4"
 # output_file = "h265_origin.mp4"
 #
 #
@@ -46,7 +53,7 @@ class S:
     def __init__(self):
         self.bucket = oss2.Bucket(oss2.Auth(access_key_id, access_key_secret), endpoint_internal, bucket_name)
         self.url_get_videos_by_status = "https://api.tiefen.fun/op/media/get_videos_by_status"
-        self.url_update_video_compress = "https://api.tiefen.fun/op/media/url_update_video_compress"
+        self.url_update_video_compress = "https://api.tiefen.fun/op/media/update_video_compress"
         self.hw_cdn_host = "https://filecdnhw01.tiefen.fun/"

     def save_video_from_oss(self, oss_src_id: str, local_path: str):
@@ -57,7 +64,7 @@ class S:
     def get_one_wait_compress_video(self):
         param = {
-            "ids": [23431],
+            # "ids": [23431],
             "status": 0,
             "offset": 0,
             "limit": 1,
         }
@@ -69,6 +76,20 @@ class S:
             return lis[0]
         return None

+    def set_compress_finish(self, video_id, size_src, src_id_h264, size_h264, status, resize_t):
+        param = {
+            "id": video_id,
+            "size_src": size_src,
+            "src_id_h264": src_id_h264,
+            "size_h264": size_h264,
+            "status": status,
+            "resize_t": resize_t,
+        }
+        logger.Info("{}".format(dict2json(param)))
+        res = call_service(self.url_update_video_compress, param)
+        ret = safe_get_int(res, "ret")
+        return ret
+
     def proc_one(self):
         video = self.get_one_wait_compress_video()
         if not video:
@@ -76,10 +97,13 @@

         video_id = safe_get_int(video, "id")
         src_id = safe_get_str(video, "src_id")
+        src_id_h264 = src_id.replace("vdprod", "vdprodh264")
         src_id_python_type = src_id.replace("/", "_")
+        src_id_h264_python_type = src_id_h264.replace("/", "_")

         cur_dir = os.getcwd() + "/"
         local_src_path = cur_dir + src_id_python_type
+        local_h264_path = cur_dir + src_id_h264_python_type

         # Download the video from OSS to a local file
         obj = self.save_video_from_oss(src_id, local_src_path)
@@ -91,22 +115,59 @@
         elif content_type == "video/quicktime":
             local_src_path_new = local_src_path + ".mov"
         else:
-            print("invalid content_type, id: {}, src_id: {}, content_type: {}", video_id, self.hw_cdn_host + src_id, content_type)
+            logger.Info("invalid content_type, id: {}, src_id: {}, content_type: {}".format(video_id, self.hw_cdn_host + src_id, content_type))
             return

         os.renames(local_src_path, local_src_path_new)

         # Transcode to an H.264 MP4
         input_file = local_src_path_new
-        output_file = local_src_path + ".mp4"
+        output_file = local_h264_path + ".mp4"
         ff = ffmpy.FFmpeg(
             inputs={input_file: None},
             outputs={output_file: '-c:v libx264'}
         )
         ff.run()
-        print(content_type, file_size)
-        print(local_src_path_new)
+        # Upload the transcoded file back to OSS
+        upload_ret = self.upload_video_to_oss(output_file, src_id_h264)
+        upload_ret_status = upload_ret.status
+        logger.Info("upload_ret, {}, {}".format(src_id, upload_ret_status))
+        output_content_type = "video/mp4"
+        output_file_size = os.path.getsize(output_file)
+
+        # Update the DB record
+        db_ret = self.set_compress_finish(video_id, file_size, src_id_h264, output_file_size, 100, int(time.time()))
+
+        os.remove(input_file)
+        os.remove(output_file)
+
+        logger.Info("before, {}, {}, {}".format(src_id, content_type, file_size))
+        logger.Info("after_h264, {}, {}, {}, {}".format(src_id_h264, output_content_type, output_file_size, db_ret))
+        logger.Info("host: {}".format(self.hw_cdn_host + src_id_h264))
+
+
+def get_video_stat(video_path):
+    video = cv2.VideoCapture(video_path)
+
+    # Total frame count
+    total_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
+
+    # Frame rate
+    fps = video.get(cv2.CAP_PROP_FPS)
+
+    # Frame width and height
+    width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
+    height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
+
+    print(video_path)
+    print(f"frames: {total_frames}")
+    print(f"fps: {fps}")
+    print(f"width: {width}")
+    print(f"height: {height}")


 s = S()
-s.proc_one()
+while True:
+    s.proc_one()
+# get_video_stat("vdprod_7b_09_f8c4-bd63-4d4a-898b-6d1431f06994.mov")
+# get_video_stat("vdprod_7b_09_f8c4-bd63-4d4a-898b-6d1431f06994.mp4")
diff --git a/cronjob/vas/calc.py b/cronjob/vas/calc.py
new file mode 100644
index 0000000..ae697ba
--- /dev/null
+++ b/cronjob/vas/calc.py
@@ -0,0 +1,131 @@
+from lib.all import *
+
+service_name = 'vas_calc'
+log_dir = '/app/log'
+logger = Logger(service_name, log_dir=log_dir)
+
+hds = [
+    "时间",  # date
+    "会员数", "会员流水",  # membership count, membership revenue
+    "空间解锁数", "空间解锁流水",  # zone unlock count, zone unlock revenue
+    "空间超粉数", "空间超粉流水",  # zone super-fan count, zone super-fan revenue
+    "空间动态数", "空间动态流水",  # zone moment count, zone moment revenue
+    "其他数(微信+金币)", "其他流水(微信+金币)",  # other count (WeChat + coins), other revenue (WeChat + coins)
+    "总流水", "主播分成", "净收入",  # total revenue, streamer share, net income
+    "利润率",  # profit margin
+    "会员流水占比",  # membership revenue share
+    "空间解锁占比",  # zone unlock share
+    "空间超粉占比",  # zone super-fan share
+    "空间动态占比",  # zone moment share
+    "其他占比",  # other share
+]
+csv_w = Csv("income.csv", header=hds)
+
+
+class S:
+    def __init__(self, st, et):
+        self.st = st
+        self.et = et
+        self.mysql_db_vas = Mysql(
+            "rm-bp11t1616a1kjvmx5.mysql.rds.aliyuncs.com", 3306, "vas", "root", "Wishpal2024"
+        )
+
+        self.csv = Csv("income")
+
+    def __del__(self):
+        self.mysql_db_vas.close()
+
+    def get_product_sold_list(self):
+        sql = '''
+select product_id,
+       count(1) cnt,
+       sum(pay_amount) money
+from vas_order
+where ct>={} and ct<{}
+and order_status in (1,2)
+group by product_id
+'''.format(self.st, self.et)
+        docs = self.mysql_db_vas.query(sql)
+        return docs
+
+    def get_streamer_dias(self):
+        sql = '''
+select sum(`change`) dias
+from vas_ch_income
+where ct>={} and ct<{}
+and mid>0
+'''.format(self.st, self.et)
+        docs = self.mysql_db_vas.query(sql)
+        if len(docs) > 0:
+            return safe_get_int(docs[0], "dias")
+        return 0
+
+    def proc(self):
+        total_money = 0  # total revenue
+        membership_cnt = 0
+        membership_money = 0
+        zone_admission_cnt = 0
+        zone_admission_money = 0
+        zone_superfan_cnt = 0
+        zone_superfan_money = 0
+        zone_moment_cnt = 0
+        zone_moment_money = 0
+        other_cnt = 0
+        other_money = 0
+
+        sold_list = self.get_product_sold_list()
+        for sold in sold_list:
+            product_id = safe_get_str(sold, "product_id")
+            cnt = safe_get_int(sold, "cnt")
+            money = int(safe_get_int(sold, "money") / 100)
+            total_money += money
+            if product_id == "membership":
+                membership_cnt += cnt
+                membership_money += money
+            elif product_id == "h5_zone_admission":
+                zone_admission_cnt += cnt
+                zone_admission_money += money
+            elif product_id == "h5_zone_superfanship":
+                zone_superfan_cnt += cnt
+                zone_superfan_money += money
+            elif product_id == "h5_zone_moment":
+                zone_moment_cnt += cnt
+                zone_moment_money += money
+            else:
+                other_cnt += cnt
+                other_money += money
+        streamer_dias = self.get_streamer_dias()
+        streamer_money = int(streamer_dias / 10)
+        official_money = total_money - streamer_money
+        data = [
+            get_time_str_by_ts(self.st)[:10],
+            membership_cnt, membership_money,
+            zone_admission_cnt, zone_admission_money,
+            zone_superfan_cnt, zone_superfan_money,
+            zone_moment_cnt, zone_moment_money,
+            other_cnt, other_money,
+            total_money, streamer_money, official_money,
+            "%.2f%%" % (safe_div(official_money, total_money)),
+            "%.2f%%" % (safe_div(membership_money, total_money)),
+            "%.2f%%" % (safe_div(zone_admission_money, total_money)),
+            "%.2f%%" % (safe_div(zone_superfan_money, total_money)),
+            "%.2f%%" % (safe_div(zone_moment_money, total_money)),
+            "%.2f%%" % (safe_div(other_money, total_money))
+        ]
+        csv_w.append([data])
+        print(data)
+
+
+# st_et_str_map = gen_st_et_str_map_v2(
+#     "2024-04-30 00:00:00", "2024-05-06 00:00:00"
+# )
+
+st_et_str_map = gen_st_et_str_map(
+    "2024-04-30 00:00:00", 1
+)
+
+for st_str, et_str in st_et_str_map.items():
+    s = S(
+        get_ts_by_str(st_str), get_ts_by_str(et_str)
+    )
+    s.proc()
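
For context on the transcode step in proc_one: the ffmpy wrapper only assembles an ffmpeg command line and shells out to it. A minimal sketch, with placeholder file names rather than paths from this job, that prints the generated command without running it:

import ffmpy

ff = ffmpy.FFmpeg(
    inputs={"input.mov": None},              # no input-side options
    outputs={"output.mp4": "-c:v libx264"},  # re-encode the video track with libx264
)
# ff.cmd is roughly: ffmpeg -i input.mov -c:v libx264 output.mp4
print(ff.cmd)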
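
The worker loop at the bottom of vd_compress.py (`while True: s.proc_one()`) polls again immediately when get_one_wait_compress_video returns nothing, so an empty queue keeps the process spinning. A sketch of one way to back off between empty polls; it assumes proc_one is changed to return True only when it actually handled a video, which the diff above does not do:

import time

def run_worker(worker, idle_seconds=10):
    # Poll for pending videos; sleep whenever nothing was processed so the
    # cron container is not pinned at 100% CPU while the queue is empty.
    while True:
        processed = worker.proc_one()  # hypothetical True/False return value
        if not processed:
            time.sleep(idle_seconds)

# run_worker(S())  # S as defined in vd_compress.py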
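
In calc.py the share columns are built with expressions like "%.2f%%" % (safe_div(membership_money, total_money)). safe_div comes from lib.all and is not shown in this diff; if it returns a plain quotient (0.42 rather than 42.0), those cells will read "0.42%" instead of "42.00%". A small formatting helper under that assumption; the name pct is made up for illustration:

def pct(part, total):
    # Guard against a zero denominator, scale the ratio to a percentage,
    # and format it the way the report does, e.g. pct(42, 100) -> "42.00%".
    ratio = part / total if total else 0.0
    return "%.2f%%" % (ratio * 100)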