Skip to main content

Google Project Zero Issue Downloader

· 2 min read

I was not sure if there's any API for Monorail, so I made a small script to download/monitor issues from @ProjectZeroBugs 🐞. It should work for other projects as well.


#!/usr/bin/env python3

# Requirements: pip3 install pyppeteer requests halo

# Usage: ~/ -h
# 🐧 ant4g0nist@ ➜ ~/ -h
# usage: Google Project Zero Issue Downloader [-h] [-f FINDER]
# optional arguments:
# -h, --help show this help message and exit
# -f FINDER, --finder FINDER
# name of the researcher.
# 🐧 ant4g0nist@ ➜ ~/ -f forshaw

import os
import re
import json
import halo
import shutil
import asyncio
import argparse
import requests
from pyppeteer import launch

# Base URL of the Monorail instance that hosts the Project Zero tracker.
# Left empty in the published snippet; every request below prefixes it,
# so the script cannot work without it.
host = "https://bugs.chromium.org"

def stripSpecialChars(title):
    """Return *title* with every character removed except letters, digits,
    spaces, newlines and dots, so it is safe to use as a directory name."""
    # Raw string avoids the invalid-escape warning the plain '\.'' produces.
    return re.sub(r'[^a-zA-Z0-9 \n.]', '', title)

# Request headers for Monorail's pRPC endpoints. The hard-coded x-xsrf-token
# is stale; newMonoRail() replaces it with a freshly scraped token before use.
# NOTE(review): the closing brace was lost in the published snippet — restored.
headers = {
    "accept": "application/json",
    "accept-language": "en-GB,en-US;q=0.9,en;q=0.8",
    "content-type": "application/json",
    "sec-ch-ua": "\"Google Chrome\";v=\"95\", \"Chromium\";v=\"95\", \";Not A Brand\";v=\"99\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"macOS\"",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
    "credentials": "include",
    "x-xsrf-token": "cq9oZbe6pat6f-OdEkHPZDoxNjQ5NTEwNTE5",
    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.75 Safari/537.36",
}

def fetchIssue(localId, researcher, summary, spinner):
    """Download every comment and attachment of one Monorail issue.

    localId: numeric issue id inside the project-zero project.
    researcher: finder name, used as the top-level output directory.
    summary: issue title; sanitized and used as the per-issue directory.
    spinner: halo.Halo instance whose text is updated for progress display.

    Writes comment_<n>.md files and raw attachment files under
    reports/<researcher>/<summary>/.
    """
    url = f"{host}/prpc/monorail.Issues/ListComments"
    body = {"issueRef": {"localId": localId, "projectName": "project-zero"}}

    # NOTE(review): renamed from `re` — the original shadowed the re module.
    # The `requests.post(` call itself was truncated in the published snippet.
    resp = requests.post(url=url, data=json.dumps(body), headers=headers)
    if resp.status_code == 200:
        # Monorail pRPC responses start with the anti-XSSI prefix ")]}'\n";
        # skip the first 5 bytes before parsing the JSON payload.
        results = json.loads(resp.text[5:])
        comments = results['comments']
        summary = stripSpecialChars(summary)
        spinner.text = f'Downloading: "{summary}"'
        os.makedirs(f"reports/{researcher}/{summary}", exist_ok=True)

        count = 0
        for comment in comments:
            if "content" in comment:
                content = comment["content"]
                # The write (and the counter increment) were lost in the
                # published snippet; without them every file would be empty.
                with open(f'reports/{researcher}/{summary}/comment_{count}.md', 'w') as f:
                    f.write(content)
                count += 1

            if "attachments" in comment:
                for attachment in comment["attachments"]:
                    # Skip attachments flagged as deleted.
                    if attachment.get("isDeleted"):
                        continue
                    downloadUrl = attachment["downloadUrl"]
                    name = attachment["filename"]
                    # assumes downloadUrl is absolute — TODO confirm; Monorail
                    # sometimes returns project-relative attachment URLs.
                    response = requests.get(f"{downloadUrl}", stream=True)
                    with open(f'reports/{researcher}/{summary}/{name}', 'wb') as out_file:
                        shutil.copyfileobj(response.raw, out_file)
                    # Drop the streamed response promptly to release the socket.
                    del response


async def fetchCSRFToken():
    """Scrape a fresh XSRF token from the issue-list page.

    Launches headless Chromium via pyppeteer, loads the Project Zero issue
    list and reads window.CS_env.token from the page context.
    """
    url = f"{host}/p/project-zero/issues/list"
    browser = await launch()
    # try/finally so the headless browser is closed even when navigation or
    # evaluation raises — the original leaked the Chromium process on error.
    try:
        page = await browser.newPage()
        await page.goto(url)
        csrf = await page.evaluate(''' window.CS_env.token ''')
    finally:
        await browser.close()
    return csrf

async def newMonoRail(researcher, csrf=None):
    """List all Project Zero issues found by *researcher* and download each.

    researcher: Monorail "finder" username used in the search query.
    csrf: XSRF token for the pRPC call. When the request fails (token absent
        or stale), a fresh token is scraped and the call is retried.
    """
    if csrf:
        headers['x-xsrf-token'] = csrf

    query = f"finder:{researcher}"
    url = f"{host}/prpc/monorail.Issues/ListIssues"
    body = {
        "projectNames": ["project-zero"],
        "query": query,
        "cannedQuery": 1,
        "pagination": {"start": 1, "maxItems": 3000},
    }

    # NOTE(review): `requests.post(` was truncated in the published snippet.
    resp = requests.post(url=url, data=json.dumps(body), headers=headers)

    if resp.status_code == 200:
        os.makedirs(f"reports/{researcher}", exist_ok=True)
        # Strip Monorail's 5-byte anti-XSSI prefix before parsing.
        results = json.loads(resp.text[5:])
        # 'dots' — the original 'dits' is not a valid halo spinner name.
        spinner = halo.Halo('Fetching issues', spinner='dots')
        for result in results['issues']:
            fetchIssue(result['localId'], researcher, result['summary'], spinner)
    else:
        # NOTE(review): reconstructed from the snippet's trailing lines —
        # on failure, scrape a fresh CSRF token and retry once with it.
        csrf = await fetchCSRFToken()
        await newMonoRail(researcher, csrf)

if __name__ == '__main__':
    parser = argparse.ArgumentParser("Google Project Zero Issue Downloader")
    parser.add_argument('-f', '--finder', default=None, help='name of the researcher.')
    args = parser.parse_args()

    finder = args.finder
    # Drive the async downloader; newMonoRail() scrapes a CSRF token itself
    # on the retry path when none is supplied. The original snippet parsed
    # the arguments but never invoked the downloader.
    asyncio.run(newMonoRail(finder))