@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import csv
+import time
+
+import pymysql
+
+
+class OfpayDataSyncer:
+
+    def __init__(self):
+        self.db_conn = None
+        self.connect_mysql()
+
+    def connect_mysql(self):
+        # Connect to MySQL, retrying until the connection succeeds.
+        config = {
+            'host': '47.106.225.136',
+            'port': 3306,
+            'user': 'root',
+            'passwd': 'sjojo123456',
+            'database': 'mitmproxy',
+            'charset': 'utf8',
+        }
+        db_conn = None
+        while True:
+            try:
+                db_conn = pymysql.connect(**config)
+                db_conn.ping(reconnect=True)
+            except pymysql.OperationalError as e:
+                print(e)
+                print('Connection lost, attempting to reconnect...')
+                # Close the stale connection (if any), wait briefly, and retry.
+                if db_conn:
+                    db_conn.close()
+                time.sleep(1)
+            else:
+                break
+        self.db_conn = db_conn
+
+    def sync(self, filename='results.csv'):
+        # Dump today's rows from the elife table into a CSV file.
+        sql_query = '''
+            SELECT * FROM elife
+            WHERE update_time >= CURDATE()
+              AND update_time < CURDATE() + INTERVAL 1 DAY;
+        '''
+        cursor = self.db_conn.cursor()
+        cursor.execute(sql_query)
+        results = cursor.fetchall()
+
+        with open(filename, 'w', newline='', encoding='utf-8') as f:
+            writer = csv.writer(f)
+            if results:
+                # Use the cursor metadata for the header row.
+                headers = [col[0] for col in cursor.description]
+                writer.writerow(headers)
+            for row in results:
+                writer.writerow(row)
+
+    def read(self, filename='results.csv'):
+        # Load a previously exported CSV file into a list of dicts.
+        data_dicts = []
+        try:
+            with open(filename, mode='r', encoding='utf-8') as csvfile:
+                reader = csv.DictReader(csvfile)
+                for row in reader:
+                    data_dicts.append(row)
+        except IOError as e:
+            print(f"IO error occurred: {e}")
+        print(data_dicts)
+        return data_dicts
+
+
+def main():
+    syncer = OfpayDataSyncer()
+    syncer.sync('elife_data.csv')
+    # syncer.read()
+
+
+if __name__ == "__main__":
+    main()