Why I'm Writing This
Been using Instaloader for about a year now. Started with basic post downloads, gradually moved to more complex stuff - Stories, Highlights, Reels, metadata extraction.
Not claiming this is the "right" way or that I'm some expert. Just what worked for me through trial and error.
Downloading Stories
Stories disappear after 24 hours, so you need to download them regularly. Here's what I use:
import instaloader
import time
from datetime import datetime

L = instaloader.Instaloader()

# Load session: reuse the saved session file when it exists, otherwise log in
# and save a fresh one. Catch only FileNotFoundError — the original bare
# `except:` would also swallow KeyboardInterrupt and real corruption errors,
# silently re-logging-in on every failure.
try:
    L.load_session_from_file('username', 'session')
except FileNotFoundError:
    L.login('username', 'password')
    L.save_session_to_file('session')
def download_stories(profile_name):
    """Download every currently active story item for one profile."""
    try:
        target_dir = f"{profile_name}_stories"
        user_id = instaloader.Profile.from_username(L.context, profile_name).userid
        # Stories are visible only for public accounts or accounts the
        # logged-in user follows, and only while live (24 hours).
        for story in L.get_stories([user_id]):
            for story_item in story.get_items():
                print(f"Downloading story from {profile_name}")
                L.download_storyitem(story_item, target=target_dir)
                time.sleep(5)  # pause between individual story items
        print(f"Downloaded stories from {profile_name}")
    except Exception as e:
        print(f"No stories available or error: {e}")
# Download stories from a fixed watchlist of profiles you follow.
profiles = ['profile1', 'profile2', 'profile3']
for name in profiles:
    download_stories(name)
    time.sleep(10)  # breathing room between profiles
💡 Important
Stories only work if you follow the account or it's public. Private accounts you don't follow won't return any stories even with valid login.
Downloading Highlights
Highlights are archived stories. They don't expire like regular stories. Different approach to download them:
import instaloader
import time

# Shared loader instance for the highlights snippet.
# Assumes the session file created in the Stories section already exists;
# load_session_from_file raises FileNotFoundError otherwise.
L = instaloader.Instaloader()
L.load_session_from_file('username', 'session')
def download_highlights(profile_name):
    """Download every highlight reel (archived stories) for one profile.

    Unlike live stories, highlights do not expire, so this can run any time.
    Prints progress; errors are reported rather than raised.
    """
    try:
        profile = instaloader.Profile.from_username(L.context, profile_name)
        # get_highlights() returns a generator, so count while iterating.
        # The original `len(list(highlights))` ran AFTER the for-loop had
        # already exhausted the generator and therefore always printed 0.
        highlight_count = 0
        for highlight in L.get_highlights(profile.userid):
            highlight_count += 1
            print(f"Downloading highlight: {highlight.title}")
            # Each highlight is a collection of story items
            for item in highlight.get_items():
                L.download_storyitem(item, target=f"{profile_name}_{highlight.title}")
                time.sleep(3)
            time.sleep(5)  # Delay between highlights
        print(f"Downloaded {highlight_count} highlights from {profile_name}")
    except Exception as e:
        print(f"Error downloading highlights: {e}")
# Use it: fetch all archived highlights for a single account
download_highlights('target_profile')
Downloading Reels
Reels are just regular posts with video content. The standard post download works:
import instaloader
import time

# NOTE(review): no session is loaded in this snippet, so requests run
# anonymously — anonymous access is rate-limited more aggressively and
# cannot see private accounts. Confirm this is intentional.
L = instaloader.Instaloader()
def download_reels(profile_name):
    """Download up to 20 of the newest video posts for one profile."""
    try:
        profile = instaloader.Profile.from_username(L.context, profile_name)
        downloaded = 0
        for post in profile.get_posts():
            # Skip image posts — video posts cover Reels.
            if not post.is_video:
                continue
            print(f"Downloading reel: {post.shortcode}")
            L.download_post(post, target=profile_name)
            downloaded += 1
            time.sleep(10)  # longer pause, since videos are heavier downloads
            if downloaded >= 20:  # cap at 20 reels per run
                break
        print(f"Downloaded {downloaded} reels from {profile_name}")
    except Exception as e:
        print(f"Error: {e}")
# Example: grab the most recent reels for one account
download_reels('target_profile')
Extracting Metadata Without Downloading
Sometimes you just want the metadata (likes, comments, caption) without downloading media files:
import instaloader
import json
import time

# NOTE(review): anonymous loader (no session load) — fine for public
# profiles but hits stricter rate limits; confirm this is intended.
L = instaloader.Instaloader()
def extract_metadata(profile_name):
    """Collect profile and recent-post metadata without downloading media.

    Writes the result to '<profile_name>_metadata.json' and returns the
    metadata dict; returns None on any error (errors are printed).
    """
    try:
        profile = instaloader.Profile.from_username(L.context, profile_name)
        metadata = {
            'username': profile.username,
            'full_name': profile.full_name,
            'bio': profile.biography,
            'followers': profile.followers,
            'following': profile.followees,
            'posts_count': profile.mediacount,
            'is_private': profile.is_private,
            'is_verified': profile.is_verified,
            'profile_pic_url': profile.profile_pic_url,
            'posts': []
        }
        # Extract post metadata (without downloading)
        for post in profile.get_posts():
            post_data = {
                'shortcode': post.shortcode,
                'url': post.url,
                'caption': post.caption if post.caption else '',
                'likes': post.likes,
                'comments': post.comments,
                'is_video': post.is_video,
                'video_url': post.video_url if post.is_video else None,
                'date': post.date_local.isoformat(),
                'location': post.location.name if post.location else None,
                # tagged_users yields plain username strings in current
                # Instaloader releases; tolerate objects with .username
                # from other versions instead of crashing on the attribute.
                'tagged_users': [t if isinstance(t, str) else t.username
                                 for t in post.tagged_users],
            }
            metadata['posts'].append(post_data)
            time.sleep(2)  # Minimal delay for metadata only
            # Stop after 50 posts
            if len(metadata['posts']) >= 50:
                break
        # Save to JSON. Explicit utf-8 + ensure_ascii=False keeps emoji and
        # non-Latin captions readable and avoids UnicodeEncodeError on
        # platforms whose default file encoding is not UTF-8 (e.g. Windows).
        with open(f'{profile_name}_metadata.json', 'w', encoding='utf-8') as f:
            json.dump(metadata, f, indent=2, ensure_ascii=False)
        print(f"Extracted metadata for {len(metadata['posts'])} posts")
        return metadata
    except Exception as e:
        print(f"Error: {e}")
        return None
# Use it: returns the metadata dict (or None on error) and writes the JSON file
data = extract_metadata('target_profile')
💡 Use Case
This is great for analytics. You can track engagement rates, posting schedules, content themes - all without filling your hard drive with media files.
Batch Processing Multiple Profiles
When downloading from multiple profiles, you need to be extra careful with rate limits:
import instaloader
import time
import random

# Logged-in loader for batch work; assumes the session file from the
# Stories section exists (raises FileNotFoundError otherwise).
L = instaloader.Instaloader()
L.load_session_from_file('username', 'session')
def batch_download(profiles, max_posts_per_profile=20):
    """Download recent posts for several profiles with randomized pacing.

    Returns a dict mapping each profile name to a status record:
    {'status': 'success', 'downloaded': n} or {'status': 'error', 'message': ...}.
    """
    results = {}
    total = len(profiles)
    for position, profile_name in enumerate(profiles, start=1):
        print(f"\n[{position}/{total}] Processing {profile_name}")
        try:
            profile = instaloader.Profile.from_username(L.context, profile_name)
            downloaded = 0
            for post in profile.get_posts():
                if downloaded >= max_posts_per_profile:
                    break
                L.download_post(post, target=profile_name)
                downloaded += 1
                # Random delay between posts to avoid a regular cadence.
                time.sleep(random.uniform(5, 10))
            results[profile_name] = {'status': 'success', 'downloaded': downloaded}
            # Longer randomized delay between profiles.
            profile_delay = random.uniform(30, 60)
            print(f"Waiting {profile_delay:.1f}s before next profile...")
            time.sleep(profile_delay)
        except Exception as e:
            results[profile_name] = {'status': 'error', 'message': str(e)}
            print(f"Error with {profile_name}: {e}")
            time.sleep(120)  # back off hard after a failure
    return results
# Use with a list of profiles
profiles_to_download = ['user1', 'user2', 'user3', 'user4', 'user5']
results = batch_download(profiles_to_download, max_posts_per_profile=20)

# Print summary.
# The status glyphs had been mojibake'd ("โ") by a lost encoding pass;
# restored to a plain check mark / cross.
print("\n=== Download Summary ===")
for profile, result in results.items():
    if result['status'] == 'success':
        print(f"{profile}: ✓ {result['downloaded']} posts")
    else:
        print(f"{profile}: ✗ {result['message']}")
⚠️ Warning
Batch processing is when you're most likely to get blocked. Keep batches small (5-10 profiles max), use long delays (30-60 seconds), and don't run batches daily.
Downloading Specific Posts by URL
Sometimes you just want specific posts, not entire profiles:
import instaloader

# NOTE(review): anonymous loader — works for public posts; a session load
# would be needed for posts from private accounts you follow.
L = instaloader.Instaloader()
def download_from_url(post_url):
    """Download a single post given its URL and return basic metadata.

    Accepts https://www.instagram.com/p/CODE/ with or without the trailing
    slash and with optional ?query parameters. Returns a dict with
    shortcode/likes/comments/caption, or None on error (errors are printed).
    """
    try:
        # Extract the shortcode: drop any query string, strip the trailing
        # slash, take the last path segment. The original split('/')[-2]
        # returned 'p' whenever the URL lacked a trailing slash.
        shortcode = post_url.split('?')[0].rstrip('/').split('/')[-1]
        # Load post directly
        post = instaloader.Post.from_shortcode(L.context, shortcode)
        # Download to a target folder
        L.download_post(post, target=f"single_post_{shortcode}")
        print(f"Downloaded post {shortcode}")
        # Return metadata (caption truncated to 100 chars)
        return {
            'shortcode': shortcode,
            'likes': post.likes,
            'comments': post.comments,
            'caption': post.caption[:100] if post.caption else ''
        }
    except Exception as e:
        print(f"Error downloading from URL: {e}")
        return None
# Use it: download one post by URL; post_info is a dict or None on error
url = "https://www.instagram.com/p/ABC123/"
post_info = download_from_url(url)
What Works Long-Term
After using Instaloader for a year, here's what keeps working:
Conservative Settings
import instaloader
import time
import random

# Configure for safe long-term use: skip optional payloads to cut both
# disk usage and the number of API calls per post.
L = instaloader.Instaloader(
    download_videos=True,
    download_video_thumbnails=False,  # Saves space
    download_geotags=False,  # Less API calls
    download_comments=False,  # Less API calls
    save_metadata=True,  # JSON files with metadata
    compress_json=True  # Smaller files
)
# Load session (assumes the session file exists from an earlier login)
L.load_session_from_file('username', 'session')
# Use these settings
def safe_download(profile_name, limit=30):
    """Download up to `limit` recent posts with progressively longer delays."""
    profile = instaloader.Profile.from_username(L.context, profile_name)
    for index, post in enumerate(profile.get_posts()):
        if index >= limit:
            break
        L.download_post(post, target=profile_name)
        # Progressive delays: the deeper into the run, the slower we go.
        if index < 10:
            low, high = 5, 8
        elif index < 20:
            low, high = 8, 12
        else:
            low, high = 12, 20
        time.sleep(random.uniform(low, high))
        # Extra breather after every 5th post.
        if (index + 1) % 5 == 0:
            time.sleep(random.uniform(20, 30))
Regular Rotation
Don't hit the same profiles every day. I rotate:
- Day 1: Profiles A, B, C (20 posts each)
- Day 2: Profiles D, E, F (20 posts each)
- Day 3: No downloads (let limits reset)
- Day 4: Back to A, B, C
Monitoring Session Health
import os
import time


def check_session_health(session_file='session', max_age=6 * 3600):
    """Return True if the saved session file is fresh and still usable.

    Backward compatible with the argument-less original: by default it
    checks the file named 'session' with a 6-hour freshness window.

    Checks, in order: the file exists, it is younger than `max_age`
    seconds, and it can actually be loaded and used for a simple request.
    """
    if not os.path.exists(session_file):
        return False
    # Treat sessions older than max_age as stale.
    if time.time() - os.path.getmtime(session_file) > max_age:
        return False
    # Try to actually use the session with a cheap public-profile lookup.
    try:
        # Imported lazily: this snippet never imported instaloader at the
        # top, so a module-level reference would raise NameError.
        import instaloader
        L = instaloader.Instaloader()
        L.load_session_from_file('username', session_file)
        # Test with a simple request
        instaloader.Profile.from_username(L.context, 'instagram')
        return True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        return False
# Use before batch operations: gate the run on a healthy session.
session_ok = check_session_health()
status_message = "Session is good, proceed" if session_ok else "Session expired, need to login"
print(status_message)
Final Thoughts
Instaloader is a solid tool for downloading Instagram content. But it's not magic - Instagram actively fights automated access.
The combination that's worked for me:
- Conservative delays (5-10 seconds minimum)
- Small batches (20-30 posts max)
- Regular rotation (don't hit same profiles daily)
- Session management (reload every 6 hours)
- Error handling (wait 2-5 minutes on failures)
Even with all this, expect to get blocked occasionally. It's just part of using these tools.