
Removed comment downloading, some minor text layout improvements

notcammy 7 years ago
parent
commit
38244e6d1f
4 changed files with 20 additions and 234 deletions
  1. +2  -2    pyinstalive/auth.py
  2. +0  -139  pyinstalive/comments.py
  3. +17 -83   pyinstalive/downloader.py
  4. +1  -10   pyinstalive/initialize.py

+ 2 - 2
pyinstalive/auth.py

@@ -93,9 +93,9 @@ def login(username, password, show_cookie_expiry, ignore_existing_cookie):
 		seperator("GREEN")
 		sys.exit(99)
 
-	log('[I] Login to "' + api.authenticated_user_name + '" OK!', "GREEN")
+	log('[I] Logging in to user "' + api.authenticated_user_name + '" successful.', "GREEN")
 	if show_cookie_expiry.title() == 'True' and ignore_existing_cookie == False:
 		cookie_expiry = api.cookie_jar.expires_earliest
-		log('[I] Login cookie expiry date: {0!s}'.format(datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %H:%M:%S')), "GREEN")
+		log('[I] Login cookie expiry date: {0!s}'.format(datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %I:%M:%S %p')), "GREEN")
 
 	return api
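
A minimal standalone sketch of the new cookie-expiry formatting (the epoch value below is an arbitrary example, not taken from a real cookie jar; the printed clock time depends on the local timezone):

import datetime

# Hypothetical expiry timestamp, for illustration only.
cookie_expiry = 1735689600

# New format: 12-hour clock with an AM/PM marker, instead of the old 24-hour '%H:%M:%S'.
formatted = datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %I:%M:%S %p')
print('[I] Login cookie expiry date: {0!s}'.format(formatted))
# e.g. "[I] Login cookie expiry date: 2025-01-01 at 01:00:00 AM"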

+ 0 - 139
pyinstalive/comments.py

@@ -1,139 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import time
-import json
-import codecs
-import sys
-from socket import timeout, error as SocketError
-from ssl import SSLError
-try:
-	# py2
-	from urllib2 import URLError
-	from httplib import HTTPException
-except ImportError:
-	# py3
-	from urllib.error import URLError
-	from http.client import HTTPException
-
-from instagram_private_api import ClientError
-from .logger import log, seperator
-
-"""
-This feature of PyInstaLive was originally written by https://github.com/taengstagram
-The code below and in downloader.py that's related to the comment downloading
-feature is modified by https://github.com/notcammy
-"""
-
-
-class CommentsDownloader(object):
-
-	def __init__(self, api, broadcast, destination_file):
-		self.api = api
-		self.broadcast = broadcast
-		self.destination_file = destination_file
-		self.comments = []
-
-	def get_live(self, first_comment_created_at=0):
-		comments_collected = self.comments
-
-		before_count = len(comments_collected)
-		try:
-			comments_res = self.api.broadcast_comments(
-				self.broadcast['id'], last_comment_ts=first_comment_created_at)
-			comments = comments_res.get('comments', [])
-			first_comment_created_at = (
-				comments[0]['created_at_utc'] if comments else int(time.time() - 5))
-			comments_collected.extend(comments)
-			after_count = len(comments_collected)
-			if after_count > before_count:
-				broadcast = self.broadcast.copy()
-				broadcast.pop('segments', None)     # save space
-				broadcast['comments'] = comments_collected
-				with open(self.destination_file, 'w') as outfile:
-					json.dump(broadcast, outfile, indent=2)
-			self.comments = comments_collected
-
-		except (SSLError, timeout, URLError, HTTPException, SocketError) as e:
-			log('[W] Comment collection error: %s' % e, "YELLOW")
-		except ClientError as e:
-			if e.code == 500:
-				log('[W] Comment collection ClientError: %d %s' % (e.code, e.error_response), "YELLOW")
-			elif e.code == 400 and not e.msg:
-				log('[W] Comment collection ClientError: %d %s' % (e.code, e.error_response), "YELLOW")
-			else:
-				raise e
-		finally:
-			time.sleep(4)
-		return first_comment_created_at
-
-	def get_replay(self):
-		comments_collected = []
-		starting_offset = 0
-		encoding_tag = self.broadcast['encoding_tag']
-		while True:
-			comments_res = self.api.replay_broadcast_comments(
-				self.broadcast['id'], starting_offset=starting_offset, encoding_tag=encoding_tag)
-			starting_offset = comments_res.get('ending_offset', 0)
-			comments = comments_res.get('comments', [])
-			comments_collected.extend(comments)
-			if not comments_res.get('comments') or not starting_offset:
-				break
-			time.sleep(4)
-
-		if comments_collected:
-			self.broadcast['comments'] = comments_collected
-			self.broadcast['initial_buffered_duration'] = 0
-			with open(self.destination_file, 'w') as outfile:
-				json.dump(self.broadcast, outfile, indent=2)
-		self.comments = comments_collected
-
-	def save(self):
-		broadcast = self.broadcast.copy()
-		broadcast.pop('segments', None)
-		broadcast['comments'] = self.comments
-		with open(self.destination_file, 'w') as outfile:
-			json.dump(broadcast, outfile, indent=2)
-
-	@staticmethod
-	def generate_log(comments, download_start_time, srt_file, comments_delay=10.0):
-		python_version = sys.version.split(' ')[0]
-		subtitles_timeline = {}
-		for i, c in enumerate(comments):
-			if 'offset' in c:
-				for k in c['comment'].keys():
-					c[k] = c['comment'][k]
-				c['created_at_utc'] = download_start_time + c['offset']
-			created_at_utc = str(2 * (c['created_at_utc'] // 2))
-			comment_list = subtitles_timeline.get(created_at_utc) or []
-			comment_list.append(c)
-			subtitles_timeline[created_at_utc] = comment_list
-
-		if subtitles_timeline:
-			timestamps = sorted(subtitles_timeline.keys())
-			mememe = False
-			subs = []
-			for i, tc in enumerate(timestamps):
-				t = subtitles_timeline[tc]
-				clip_start = int(tc) - int(download_start_time) + int(comments_delay)
-				if clip_start < 0:
-					clip_start = 0
-
-				srt = ''
-
-				if sys.version.split(' ')[0].startswith('2'):
-					for c in t:
-						if (c['user']['is_verified']):
-							srt += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)), '{} {}: {}'.format(c['user']['username'], "(V)", c['text'].encode('ascii', 'xmlcharrefreplace')))
-						else:
-							srt += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)), '{}: {}'.format(c['user']['username'], c['text'].encode('ascii', 'xmlcharrefreplace')))
-				else:
-					for c in t:
-							if (c['user']['is_verified']):
-								srt += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)), '{} {}: {}'.format(c['user']['username'], "(v)", c['text']))
-							else:
-								srt += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)), '{}: {}'.format(c['user']['username'], c['text']))
-
-				subs.append(srt)
-
-			with codecs.open(srt_file, 'w', 'utf-8-sig') as srt_outfile:
-				srt_outfile.write(''.join(subs))
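
For reference, the removed generate_log routine grouped comments into two-second buckets keyed on created_at_utc and rendered each bucket with an HH:MM:SS offset from the download start. A simplified, self-contained sketch of that logic (the sample comments are made up, and the comments_delay handling is omitted):

import time

# Made-up sample data; the real code received comments from the Instagram API.
download_start_time = 1500000000
comments = [
    {'created_at_utc': 1500000003, 'user': {'username': 'alice', 'is_verified': False}, 'text': 'hello'},
    {'created_at_utc': 1500000004, 'user': {'username': 'bob', 'is_verified': True}, 'text': 'hi'},
]

# Bucket each comment into a two-second slot, as the removed code did.
subtitles_timeline = {}
for c in comments:
    bucket = 2 * (c['created_at_utc'] // 2)
    subtitles_timeline.setdefault(bucket, []).append(c)

# Render each bucket at its offset from the start of the download.
lines = []
for bucket in sorted(subtitles_timeline):
    clip_start = max(bucket - download_start_time, 0)
    for c in subtitles_timeline[bucket]:
        tag = ' (v)' if c['user']['is_verified'] else ''
        lines.append('{}{}{}: {}\n\n'.format(
            time.strftime('%H:%M:%S\n', time.gmtime(clip_start)),
            c['user']['username'], tag, c['text']))
print(''.join(lines))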

+ 17 - 83
pyinstalive/downloader.py

@@ -9,8 +9,6 @@ from instagram_private_api_extensions import live, replay
 from instagram_private_api import ClientError
 
 from .logger import log, seperator
-from .comments import CommentsDownloader
-
 class NoLivestreamException(Exception):
 	pass
 
@@ -53,11 +51,12 @@ def record_stream(broadcast):
 		def print_status(sep=True):
 			heartbeat_info = api.broadcast_heartbeat_and_viewercount(broadcast['id'])
 			viewers = broadcast.get('viewer_count', 0)
+			if sep:
+				seperator("GREEN")
 			log('[I] Viewers     : ' + str(int(viewers)) + " watching", "GREEN")
 			log('[I] Airing time : ' + get_stream_duration(broadcast).title(), "GREEN")
 			log('[I] Status      : ' + heartbeat_info['broadcast_status'].title(), "GREEN")
-			if sep:
-				seperator("GREEN")
+
 			return heartbeat_info['broadcast_status'] not in ['active', 'interrupted'] 
 
 		mpd_url = (broadcast.get('dash_manifest')
@@ -80,8 +79,8 @@ def record_stream(broadcast):
 		seperator("GREEN")
 		sys.exit(1)
 	try:
-		seperator("GREEN")
 		log('[I] Livestream downloading started...', "GREEN")
+		seperator("GREEN")
 		log('[I] Username    : ' + record, "GREEN")
 		print_status(False)
 		log('[I] MPD URL     : ' + mpd_url, "GREEN")
@@ -98,34 +97,20 @@ def record_stream(broadcast):
 				log('[W] Could not run file: ' + str(e), "YELLOW")
 
 
-
-		if settings.save_comments.title() == "True":
-			try:
-				comments_json_file = settings.save_path + '{}_{}_{}_{}_live_comments.json'.format(settings.current_date, record, broadcast['id'], settings.current_time)
-				comment_thread_worker = None
-				comment_thread_worker = threading.Thread(target=get_live_comments, args=(api, broadcast, comments_json_file, dl,))
-				comment_thread_worker.start()
-			except Exception as e:
-				log('[E] An error occurred while checking comments: ' + e, "RED")			
-
-
-
 		dl.run()
+		seperator("GREEN")
 		log('[I] The livestream has ended. (Duration: ' + get_stream_duration(broadcast) + ")", "GREEN")
-
-		stitch_video(dl, broadcast, comment_thread_worker)
+		seperator("GREEN")
+		stitch_video(dl, broadcast)
 	except KeyboardInterrupt:
 		seperator("GREEN")
-		log('[W] Download has been aborted.', "YELLOW")
+		log('[W] Download has been aborted by the user.', "YELLOW")
 		seperator("GREEN")
 		if not dl.is_aborted:
 			dl.stop()
-			stitch_video(dl, broadcast, comment_thread_worker)
+			stitch_video(dl, broadcast)
 
-def stitch_video(dl, broadcast, comment_thread_worker):
-	if comment_thread_worker and comment_thread_worker.is_alive():
-		log("[I] Ending comment saving process...", "GREEN")
-		comment_thread_worker.join()
+def stitch_video(dl, broadcast):
 	if (settings.run_at_finish is not "None"):
 		try:
 			thread = threading.Thread(target=run_script, args=(settings.run_at_finish,))
@@ -163,7 +148,7 @@ def get_user_info(record):
 	if settings.save_replays.title() == "True": 
 		get_replays(user_id)
 	else:
-		log("", "BLUE")
+		seperator("GREEN")
 		log("[I] Replay saving is disabled either with a flag or in the config file.", "BLUE")
 		seperator("GREEN")
 		sys.exit(0)
@@ -178,7 +163,7 @@ def get_livestreams(user_id):
 		else:
 			record_stream(broadcast)
 	except NoLivestreamException as e:
-		log('[I] ' + str(e), "BLUE")
+		log('[I] ' + str(e), "YELLOW")
 	except Exception as e:
 		if (e.__class__.__name__ is not NoLivestreamException):
 			log('[E] Could not get livestreams info: ' + str(e), "RED")
@@ -201,7 +186,7 @@ def get_replays(user_id):
 			raise NoReplayException('There are no replays available.')
 		else:
 			log("[I] Available replays have been found to download, press [CTRL+C] to abort.", "GREEN")
-			log("", "GREEN")
+			seperator("GREEN")
 			for index, broadcast in enumerate(broadcasts):
 				exists = False
 
@@ -232,11 +217,6 @@ def get_replays(user_id):
 					if (len(replay_saved) == 1):
 						log("[I] Finished downloading replay " + str(current) + " of "  + str(len(broadcasts)) + ".", "GREEN")
 						seperator("GREEN")
-					if settings.save_comments.title() == "True":
-						log("[I] Checking for available comments to save...", "GREEN")
-						comments_json_file = settings.save_path + '{}_{}_{}_{}_replay_comments.json'.format(settings.current_date, record, broadcast['id'], settings.current_time)
-						get_replay_comments(api, broadcast, comments_json_file, dl)
-
 					else:
 						log("[W] No output video file was made, please merge the files manually.", "RED")
 						log("[W] Check if ffmpeg is available by running ffmpeg in your terminal.", "RED")
@@ -245,7 +225,7 @@ def get_replays(user_id):
 		seperator("GREEN")
 		sys.exit(0)
 	except NoReplayException as e:
-		log('[I] ' + str(e), "BLUE")
+		log('[I] ' + str(e), "YELLOW")
 		seperator("GREEN")
 		sys.exit(0)
 	except Exception as e:
@@ -253,58 +233,12 @@ def get_replays(user_id):
 		seperator("GREEN")
 		sys.exit(1)
 	except KeyboardInterrupt:
-		log("", "GREEN")
+		seperator("GREEN")
 		log('[W] Download has been aborted by the user.', "YELLOW")
+		seperator("GREEN")
 		try:
 			shutil.rmtree(output_dir)
 		except Exception as e:
 			log("[E] Could not remove temp folder: " + str(e), "RED")
 			sys.exit(1)
-		sys.exit(0)
-
-
-def get_replay_comments(api, broadcast, comments_json_file, dl):
-	cdl = CommentsDownloader(
-		api=api, broadcast=broadcast, destination_file=comments_json_file)
-	cdl.get_replay()
-
-	if cdl.comments:
-		comments_log_file = comments_json_file.replace('.json', '.log')
-		CommentsDownloader.generate_log(
-			cdl.comments, broadcast['published_time'], comments_log_file,
-			comments_delay=0)
-		log("[I] Successfully saved comments to logfile.", "GREEN")
-		seperator("GREEN")
-	else:
-		log("[I] There are no available comments to save.", "GREEN")
-		seperator("GREEN")
-
-def get_live_comments(api, broadcast, comments_json_file, dl):
-	cdl = CommentsDownloader(
-		api=api, broadcast=broadcast, destination_file=comments_json_file)
-	first_comment_created_at = 0
-	try:
-		while not dl.is_aborted:
-			if 'initial_buffered_duration' not in broadcast and dl.initial_buffered_duration:
-				broadcast['initial_buffered_duration'] = dl.initial_buffered_duration
-				cdl.broadcast = broadcast
-			first_comment_created_at = cdl.get_live(first_comment_created_at)
-	except ClientError as e:
-		if not 'media has been deleted' in e.error_response:
-			log("[W] Comment collection ClientError: %d %s" % (e.code, e.error_response), "YELLOW")
-
-	try:
-		if cdl.comments:
-			log("[I] Checking for available comments to save...", "GREEN")
-			cdl.save()
-			comments_log_file = comments_json_file.replace('.json', '.log')
-			CommentsDownloader.generate_log(
-				cdl.comments, settings.current_time, comments_log_file,
-				comments_delay=dl.initial_buffered_duration)
-			log("[I] Successfully saved comments to logfile.", "GREEN")
-			seperator("GREEN")
-		else:
-			log("[I] There are no available comments to save.", "GREEN")
-			seperator("GREEN")
-	except Exception as e:
-		log('[E] Could not save comments to logfile: ' + str(e), "RED")
+		sys.exit(0)
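
The record-and-stitch flow after dropping the comment thread, as a hedged standalone sketch (FakeDownloader stands in for the live downloader object, which exposes run(), stop() and is_aborted; stitch_video is reduced to a placeholder):

# Minimal sketch; not the actual PyInstaLive implementation.
class FakeDownloader:
    def __init__(self):
        self.is_aborted = False

    def run(self):
        print('downloading segments until the broadcast ends...')

    def stop(self):
        self.is_aborted = True


def stitch_video(dl, broadcast):
    # Placeholder for merging the downloaded segments into one video file.
    print('stitching broadcast {}'.format(broadcast['id']))


def record_stream(broadcast):
    dl = FakeDownloader()
    try:
        dl.run()
        stitch_video(dl, broadcast)       # normal end of the livestream
    except KeyboardInterrupt:
        if not dl.is_aborted:
            dl.stop()
            stitch_video(dl, broadcast)   # stitch whatever was downloaded so far


record_stream({'id': 12345})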

+ 1 - 10
pyinstalive/initialize.py

@@ -98,15 +98,6 @@ def check_config_validity(config):
 			settings.run_at_finish = "None"
 
 
-		try:
-			settings.save_comments = config.get('pyinstalive', 'save_comments').title()
-			if not settings.show_cookie_expiry in bool_values:
-				log("[W] Invalid or missing setting detected for 'save_comments', using default value (True)", "YELLOW")
-				settings.save_comments = 'true'
-		except:
-			log("[W] Invalid or missing setting detected for 'save_comments', using default value (True)", "YELLOW")
-			settings.save_comments = 'true'
-
 
 		try:
 			settings.save_path = config.get('pyinstalive', 'save_path')
@@ -239,7 +230,7 @@ def new_config():
 
 def run():
 	seperator("GREEN")
-	log('PYINSTALIVE (SCRIPT V{} - PYTHON V{}) - {}'.format(script_version, python_version, time.strftime('%H:%M:%S %p')), "GREEN")
+	log('PYINSTALIVE (SCRIPT V{} - PYTHON V{}) - {}'.format(script_version, python_version, time.strftime('%I:%M:%S %p')), "GREEN")
 	seperator("GREEN")
 
 	logging.disable(logging.CRITICAL)
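
The same 12-hour clock fix applies to the startup banner: '%H' is the 24-hour hour, so pairing it with '%p' produced output such as "17:05:09 PM". A tiny illustration of the difference:

import time

t = time.localtime()                      # whatever "now" is when this runs
print(time.strftime('%H:%M:%S %p', t))    # old format, e.g. 17:05:09 PM (inconsistent)
print(time.strftime('%I:%M:%S %p', t))    # new format, e.g. 05:05:09 PM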