"""Download logic for Instagram livestreams and post-live replays.

Relies on module-level state in ``pil`` (CLI/config globals such as
``pil.dl_user``, ``pil.ig_api``, ``pil.dl_path``) shared across the package.
"""
import os
import shutil
import json
import threading
import time
from xml.dom.minidom import parseString

from instagram_private_api import ClientConnectionError
from instagram_private_api import ClientError
from instagram_private_api import ClientThrottledError
from instagram_private_api_extensions import live
from instagram_private_api_extensions import replay

try:
    import logger
    import helpers
    import pil
    import dlfuncs
    import assembler
    from constants import Constants
    from comments import CommentsDownloader
except ImportError:
    from . import logger
    from . import helpers
    from . import pil
    from . import assembler
    from . import dlfuncs
    from .constants import Constants
    from .comments import CommentsDownloader


def get_stream_duration(duration_type):
    """Return a human-readable duration string for the current stream.

    duration_type: 0 = airtime (now - broadcast published_time),
                   1 = download time (now - download start epoch),
                   2 = missed time (download start - published_time).
    Returns e.g. "5 minutes and 20 seconds", or "Not available" on any error
    (e.g. when pil.livestream_obj is not set).
    """
    try:
        # For some reason the published_time is roughly 40 seconds behind real world time
        if duration_type == 0:  # Airtime duration
            stream_started_mins, stream_started_secs = divmod(
                (int(time.time()) - pil.livestream_obj.get("published_time")), 60)
        if duration_type == 1:  # Download duration
            stream_started_mins, stream_started_secs = divmod(
                (int(time.time()) - int(pil.epochtime)), 60)
        if duration_type == 2:  # Missing duration
            if (int(pil.epochtime) - pil.livestream_obj.get("published_time")) <= 0:
                # Download started 'earlier' than actual broadcast, assume started at the same time instead
                stream_started_mins, stream_started_secs = 0, 0
            else:
                stream_started_mins, stream_started_secs = divmod(
                    (int(pil.epochtime) - pil.livestream_obj.get("published_time")), 60)
        # Clamp negative values caused by clock skew between client and server.
        if stream_started_mins < 0:
            stream_started_mins = 0
        if stream_started_secs < 0:
            stream_started_secs = 0
        stream_duration_str = '%d minutes' % stream_started_mins
        if stream_started_secs:
            stream_duration_str += ' and %d seconds' % stream_started_secs
        return stream_duration_str
    except Exception:
        return "Not available"


def get_user_id():
    """Resolve ``pil.dl_user`` (username or numeric id) to a user id.

    Returns the integer user id, or None when lookup failed. Errors are
    logged rather than raised.
    """
    is_user_id = False
    user_id = None
    try:
        # If the input parses as an int, treat it as a user id directly.
        user_id = int(pil.dl_user)
        is_user_id = True
    except ValueError:
        try:
            user_res = pil.ig_api.username_info(pil.dl_user)
            user_id = user_res.get('user', {}).get('pk')
        except ClientConnectionError as cce:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, cce.code, str(cce)))
            if "getaddrinfo failed" in str(cce):
                logger.error('Could not resolve host, check your internet connection.')
            if "timed out" in str(cce):
                logger.error('The connection timed out, check your internet connection.')
        except ClientThrottledError as cte:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, cte.code, str(cte)))
        except ClientError as ce:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, ce.code, str(ce)))
            if "Not Found" in str(ce):
                logger.error('The specified user does not exist.')
        except Exception as e:
            logger.error("Could not get user info for '{:s}': {:s}".format(pil.dl_user, str(e)))
        except KeyboardInterrupt:
            logger.binfo("Aborted getting user info for '{:s}', exiting.".format(pil.dl_user))
    if user_id and is_user_id:
        logger.info("Getting info for '{:s}' successful. Assuming input is an user Id.".format(pil.dl_user))
        logger.separator()
        return user_id
    elif user_id:
        logger.info("Getting info for '{:s}' successful.".format(pil.dl_user))
        logger.separator()
        return user_id
    else:
        return None


def get_broadcasts_info():
    """Fetch the target user's story feed and stash any live broadcast /
    replay objects on ``pil.livestream_obj`` / ``pil.replays_obj``.

    Returns True on success, False otherwise (errors are logged).
    """
    try:
        user_id = get_user_id()
        if user_id:
            broadcasts = pil.ig_api.user_story_feed(user_id)
            pil.livestream_obj = broadcasts.get('broadcast')
            pil.replays_obj = broadcasts.get('post_live_item', {}).get('broadcasts', [])
            return True
        else:
            return False
    except ClientThrottledError:
        logger.error('Could not check because you are making too many requests at this time.')
        return False
    except Exception as e:
        logger.error('Could not finish checking: {:s}'.format(str(e)))
        if "timed out" in str(e):
            logger.error('The connection timed out, check your internet connection.')
        if "login_required" in str(e):
            logger.error('Login cookie was loaded but user is not actually logged in. Delete the cookie file and try '
                         'again.')
        return False
    except KeyboardInterrupt:
        logger.binfo('Aborted checking for livestreams and replays, exiting.')
        return False


def merge_segments():
    """Stitch downloaded livestream segments into a single MP4.

    Waits for the segment-JSON and comment worker threads, runs the optional
    finish command, then merges (unless --skip-merge). On a ValueError from
    the stitcher, retries via the assembler if segments exist on disk.
    Always attempts to remove the lock file afterwards.
    """
    try:
        if pil.run_at_finish:
            try:
                # Fire-and-forget: daemon thread so a hung command can't block exit.
                thread = threading.Thread(target=helpers.run_command, args=(pil.run_at_finish,))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched finish command: {:s}".format(pil.run_at_finish))
            except Exception as e:
                logger.warn('Could not execute command: {:s}'.format(str(e)))

        live_mp4_file = '{}{}_{}_{}_{}_live.mp4'.format(pil.dl_path, pil.datetime_compat, pil.dl_user,
                                                        pil.livestream_obj.get('id'), pil.epochtime)
        live_segments_path = os.path.normpath(pil.broadcast_downloader.output_dir)

        if pil.segments_json_thread_worker and pil.segments_json_thread_worker.is_alive():
            # Signal the segment-JSON generator to stop before joining it.
            pil.kill_segment_thread = True
            pil.segments_json_thread_worker.join()

        if pil.comment_thread_worker and pil.comment_thread_worker.is_alive():
            logger.info("Waiting for comment downloader to finish.")
            pil.comment_thread_worker.join()
        try:
            if not pil.skip_merge:
                logger.info('Merging downloaded files into video.')
                pil.broadcast_downloader.stitch(live_mp4_file, cleartempfiles=pil.clear_temp_files)
                logger.info('Successfully merged downloaded files into video.')
                if pil.clear_temp_files:
                    helpers.remove_temp_folder()
            else:
                logger.binfo("Merging of downloaded files has been disabled.")
                logger.binfo("Use --assemble command to manually merge downloaded segments.")
            helpers.remove_lock()
        except ValueError as e:
            logger.separator()
            logger.error('Could not merge downloaded files: {:s}'.format(str(e)))
            if os.listdir(live_segments_path):
                logger.separator()
                logger.binfo("Segment directory is not empty. Trying to merge again.")
                logger.separator()
                pil.assemble_arg = live_mp4_file.replace(".mp4", "_downloads.json")
                assembler.assemble(user_called=False)
            else:
                logger.separator()
                logger.error("Segment directory is empty. There is nothing to merge.")
                logger.separator()
            helpers.remove_lock()
        except Exception as e:
            logger.error('Could not merge downloaded files: {:s}'.format(str(e)))
            helpers.remove_lock()
    except KeyboardInterrupt:
        logger.binfo('Aborted merging process, no video was created.')
        helpers.remove_lock()


def download_livestream():
    """Download the livestream in ``pil.livestream_obj``.

    Sets up the MPD downloader, optionally launches the start command and
    comment downloader threads, runs until the broadcast ends or CTRL+C,
    then merges segments.
    """
    try:
        def print_status(sep=True):
            # Also used as the downloader's callback_check: returns True when
            # the broadcast is no longer active/interrupted (i.e. should stop),
            # or None when heartbeat checking is disabled.
            if pil.do_heartbeat:
                heartbeat_info = pil.ig_api.broadcast_heartbeat_and_viewercount(pil.livestream_obj.get('id'))
            viewers = pil.livestream_obj.get('viewer_count', 0) + 1
            if sep:
                logger.separator()
            else:
                logger.info('Username    : {:s}'.format(pil.dl_user))
            logger.info('Viewers     : {:s} watching'.format(str(int(viewers))))
            logger.info('Airing time : {:s}'.format(get_stream_duration(0)))
            if pil.do_heartbeat:
                logger.info('Status      : {:s}'.format(heartbeat_info.get('broadcast_status').title()))
                return heartbeat_info.get('broadcast_status') not in ['active', 'interrupted']
            else:
                return None

        # Prefer the inline manifest; fall back to ABR / plain playback URLs.
        mpd_url = (pil.livestream_obj.get('dash_manifest')
                   or pil.livestream_obj.get('dash_abr_playback_url')
                   or pil.livestream_obj.get('dash_playback_url'))

        pil.live_folder_path = '{}{}_{}_{}_{}_live_downloads'.format(pil.dl_path, pil.datetime_compat, pil.dl_user,
                                                                     pil.livestream_obj.get('id'), pil.epochtime)
        pil.broadcast_downloader = live.Downloader(
            mpd=mpd_url,
            output_dir=pil.live_folder_path,
            user_agent=pil.ig_api.user_agent,
            max_connection_error_retry=3,
            duplicate_etag_retry=30,
            callback_check=print_status,
            mpd_download_timeout=3,
            download_timeout=3,
            ffmpeg_binary=pil.ffmpeg_path)
    except Exception as e:
        logger.error('Could not start downloading livestream: {:s}'.format(str(e)))
        logger.separator()
        helpers.remove_lock()
        # Bail out: pil.broadcast_downloader may not exist, so continuing into
        # the download phase below would crash with a secondary error.
        return
    try:
        broadcast_owner = pil.livestream_obj.get('broadcast_owner', {}).get('username')
        try:
            broadcast_guest = pil.livestream_obj.get('cobroadcasters', {})[0].get('username')
        except Exception:
            broadcast_guest = None
        if broadcast_owner != pil.dl_user:
            logger.binfo('This livestream is a dual-live, the owner is "{}".'.format(broadcast_owner))
            broadcast_guest = None
        if broadcast_guest:
            logger.binfo('This livestream is a dual-live, the current guest is "{}".'.format(broadcast_guest))
            pil.has_guest = broadcast_guest
        logger.separator()
        print_status(False)
        logger.separator()
        helpers.create_lock_folder()
        pil.segments_json_thread_worker = threading.Thread(target=helpers.generate_json_segments)
        pil.segments_json_thread_worker.start()
        logger.info('Downloading livestream, press [CTRL+C] to abort.')
        if pil.run_at_start:
            try:
                thread = threading.Thread(target=helpers.run_command, args=(pil.run_at_start,))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched start command: {:s}".format(pil.run_at_start))
            except Exception as e:
                logger.warn('Could not launch command: {:s}'.format(str(e)))
        if pil.dl_comments:
            try:
                comments_json_file = '{}{}_{}_{}_{}_live_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
                pil.comment_thread_worker = threading.Thread(target=get_live_comments, args=(comments_json_file,))
                pil.comment_thread_worker.start()
            except Exception as e:
                logger.error('An error occurred while downloading comments: {:s}'.format(str(e)))
        # Blocks until the broadcast ends or callback_check says to stop.
        pil.broadcast_downloader.run()
        logger.separator()
        logger.info("The livestream has been ended by the user.")
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        merge_segments()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted.')
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        if not pil.broadcast_downloader.is_aborted:
            pil.broadcast_downloader.stop()
            merge_segments()


def download_replays():
    """Download every replay in ``pil.replays_obj``, skipping those whose
    MP4 already exists in the download directory. Optionally downloads the
    replay comments for each.
    """
    try:
        try:
            # Informational pass: parse each replay's DASH manifest for its duration.
            logger.info('Amount of replays    : {:s}'.format(str(len(pil.replays_obj))))
            for replay_index, replay_obj in enumerate(pil.replays_obj):
                bc_dash_manifest = parseString(replay_obj.get('dash_manifest')).getElementsByTagName('Period')
                bc_duration_raw = bc_dash_manifest[0].getAttribute("duration")
                # Duration attribute is ISO-8601-like, e.g. "PT0H12M34.5S".
                bc_minutes = (bc_duration_raw.split("H"))[1].split("M")[0]
                bc_seconds = ((bc_duration_raw.split("M"))[1].split("S")[0]).split('.')[0]
                logger.info(
                    'Replay {:s} duration    : {:s} minutes and {:s} seconds'.format(str(replay_index + 1), bc_minutes,
                                                                                     bc_seconds))
        except Exception as e:
            logger.warn("An error occurred while getting replay duration information: {:s}".format(str(e)))
        logger.separator()
        logger.info("Downloading replays, press [CTRL+C] to abort.")
        logger.separator()
        for replay_index, replay_obj in enumerate(pil.replays_obj):
            exists = False
            pil.livestream_obj = replay_obj
            dl_path_files = os.listdir(pil.dl_path)
            for dl_path_file in dl_path_files:
                if (str(replay_obj.get('id')) in dl_path_file) and ("_replay" in dl_path_file) and (dl_path_file.endswith(".mp4")):
                    logger.binfo("Already downloaded replay {:d} with ID '{:s}'.".format(replay_index + 1, str(replay_obj.get('id'))))
                    exists = True
            if not exists:
                current = replay_index + 1
                logger.info(
                    "Downloading replay {:s} of {:s} with ID '{:s}'.".format(str(current), str(len(pil.replays_obj)),
                                                                             str(replay_obj.get('id'))))
                pil.live_folder_path = '{}{}_{}_{}_{}_replay_downloads'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), replay_obj.get("published_time"))
                broadcast_downloader = replay.Downloader(
                    mpd=replay_obj.get('dash_manifest'),
                    output_dir=pil.live_folder_path,
                    user_agent=pil.ig_api.user_agent,
                    ffmpeg_binary=pil.ffmpeg_path)
                if pil.use_locks:
                    helpers.create_lock_folder()
                replay_mp4_file = '{}{}_{}_{}_{}_replay.mp4'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), replay_obj.get("published_time"))
                comments_json_file = '{}{}_{}_{}_{}_replay_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), replay_obj.get("published_time"))
                pil.comment_thread_worker = threading.Thread(target=get_replay_comments, args=(comments_json_file,))
                broadcast_downloader.download(replay_mp4_file, cleartempfiles=pil.clear_temp_files)
                if pil.clear_temp_files:
                    helpers.remove_temp_folder()
                if pil.dl_comments:
                    logger.info("Downloading replay comments.")
                    try:
                        get_replay_comments(comments_json_file)
                    except Exception as e:
                        logger.error('An error occurred while downloading comments: {:s}'.format(str(e)))
                logger.info("Finished downloading replay {:s} of {:s}.".format(str(current), str(len(pil.replays_obj))))
                helpers.remove_lock()
                if current != len(pil.replays_obj):
                    logger.separator()
        logger.separator()
        logger.info("Finished downloading all available replays.")
        helpers.remove_lock()
    except Exception as e:
        logger.error('Could not save replay: {:s}'.format(str(e)))
        helpers.remove_lock()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted by the user, exiting.')
        helpers.remove_temp_folder()
        helpers.remove_lock()


def download_following():
    """Check the logged-in account's followed users for livestreams/replays
    (honoring --no-lives / --no-replays) and launch a daemon download process
    per user that has something available.
    """
    try:
        is_checking = ''
        if pil.dl_lives and pil.dl_replays:
            is_checking = 'livestreams or replays'
        elif pil.dl_lives and not pil.dl_replays:
            is_checking = 'livestreams'
        elif not pil.dl_lives and pil.dl_replays:
            is_checking = 'replays'
        logger.info("Checking following users for any {:s}.".format(is_checking))
        broadcast_f_list = pil.ig_api.reels_tray()
        usernames_available_livestreams = []
        usernames_available_replays = []
        if broadcast_f_list['broadcasts'] and pil.dl_lives:
            for broadcast_f in broadcast_f_list['broadcasts']:
                username = broadcast_f['broadcast_owner']['username']
                if username not in usernames_available_livestreams:
                    usernames_available_livestreams.append(username)
        if broadcast_f_list.get('post_live', {}).get('post_live_items', []) and pil.dl_replays:
            for broadcast_r in broadcast_f_list.get('post_live', {}).get('post_live_items', []):
                for broadcast_f in broadcast_r.get("broadcasts", []):
                    username = broadcast_f['broadcast_owner']['username']
                    if username not in usernames_available_replays:
                        usernames_available_replays.append(username)
        logger.separator()
        # Union of both lists, preserving order and deduplicating.
        available_total = list(usernames_available_livestreams)
        available_total.extend(x for x in usernames_available_replays if x not in available_total)
        if available_total:
            logger.info("The following users have available {:s}.".format(is_checking))
            logger.info(', '.join(available_total))
            logger.separator()
            iterate_users(available_total)
        else:
            logger.info("There are currently no available {:s}.".format(is_checking))
            logger.separator()
    except Exception as e:
        logger.error("Could not finish checking following users: {:s}".format(str(e)))
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The checking process has been aborted by the user.')
        logger.separator()


def iterate_users(user_list):
    """Spawn one child PyInstaLive process per username in ``user_list``,
    skipping users that already have a lock file in the download directory.
    """
    for user in user_list:
        try:
            if os.path.isfile(os.path.join(pil.dl_path, user + '.lock')):
                logger.warn("Lock file is already present for '{:s}', there is probably another download "
                            "ongoing!".format(user))
                logger.warn(
                    "If this is not the case, manually delete the file '{:s}' and try again.".format(user + '.lock'))
            else:
                logger.info("Launching daemon process for '{:s}'.".format(user))
                start_result = helpers.run_command("{:s} -d {:s} -cp '{:s}' -dp '{:s}' {:s} {:s} {:s} {:s}".format(
                    ("'" + pil.winbuild_path + "'") if pil.winbuild_path else "pyinstalive",
                    user,
                    pil.config_path,
                    pil.dl_path,
                    '--no-lives' if not pil.dl_lives else '',
                    '--no-replays' if not pil.dl_replays else '',
                    '--no-heartbeat' if not pil.do_heartbeat else '',
                    '--username {:s} --password {:s}'.format(pil.ig_user, pil.ig_pass) if pil.config_login_overridden else ''))
                if start_result:
                    logger.warn("Could not start process: {:s}".format(str(start_result)))
                else:
                    logger.info("Process started successfully.")
            logger.separator()
            # Stagger launches so child processes don't race on login/config.
            time.sleep(2)
        except Exception as e:
            logger.warn("Could not start process: {:s}".format(str(e)))
        except KeyboardInterrupt:
            logger.binfo('The process launching has been aborted by the user.')
            logger.separator()
            break


def get_live_comments(comments_json_file):
    """Collect comments for the running livestream until the broadcast
    downloader aborts, then save them to JSON and generate a log file.

    Returns True when comments were saved, False otherwise.
    """
    try:
        comments_downloader = CommentsDownloader(destination_file=comments_json_file)
        first_comment_created_at = 0
        try:
            while not pil.broadcast_downloader.is_aborted:
                # Propagate the downloader's buffered duration into the broadcast
                # object once it is known, so comment timestamps can be offset.
                if 'initial_buffered_duration' not in pil.livestream_obj and pil.broadcast_downloader.initial_buffered_duration:
                    pil.livestream_obj['initial_buffered_duration'] = pil.broadcast_downloader.initial_buffered_duration
                    comments_downloader.broadcast = pil.livestream_obj
                first_comment_created_at = comments_downloader.get_live(first_comment_created_at)
        except ClientError as e:
            if not 'media has been deleted' in e.error_response:
                logger.warn("Comment collection ClientError: %d %s" % (e.code, e.error_response))
        try:
            if comments_downloader.comments:
                comments_downloader.save()
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments, pil.epochtime, comments_log_file,
                    comments_delay=pil.broadcast_downloader.initial_buffered_duration)
                if len(comments_downloader.comments) == 1:
                    logger.info("Successfully saved 1 comment.")
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn(
                            "Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                                str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(str(total_comments)))
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                logger.separator()
                return False
        except Exception as e:
            logger.error('Could not save comments: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt:
        logger.binfo("Downloading livestream comments has been aborted.")
        return False


def get_replay_comments(comments_json_file):
    """Download all comments for a finished replay and generate a log file.

    Returns True when comments were saved, False otherwise.
    """
    try:
        comments_downloader = CommentsDownloader(destination_file=comments_json_file)
        comments_downloader.get_replay()
        try:
            if comments_downloader.comments:
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments, pil.livestream_obj.get('published_time'), comments_log_file,
                    comments_delay=0)
                if total_comments == 1:
                    logger.info("Successfully saved 1 comment to logfile.")
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn(
                            "Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                                str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(str(total_comments)))
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                return False
        except Exception as e:
            logger.error('Could not save comments to logfile: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt:
        logger.binfo("Downloading replay comments has been aborted.")
        return False