Batch Invocation
Run many invocations of the same action in parallel. The helper below fans the calls out across a thread pool and returns results in the same order as the input parameter sets.
import concurrent.futures


def batch_invoke(client, mcp, action, token, params_list, max_workers=10):
    """
    Invoke the same action with multiple parameter sets in parallel.

    Args:
        client: ArmorIQClient instance
        mcp: MCP name
        action: Action name
        token: Intent token
        params_list: List of parameter dicts
        max_workers: Max concurrent workers

    Returns:
        List of results in the same order as params_list
    """
    def invoke_one(params):
        try:
            return client.invoke(mcp, action, token, params)
        except Exception as e:
            # Surface per-item failures instead of aborting the whole batch
            return {"success": False, "error": str(e)}

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(invoke_one, params) for params in params_list]
        # Collect in submission order so results line up with params_list
        return [f.result() for f in futures]
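Results come back in input order because the futures are collected in the order they were submitted. If you would rather react to results as they finish (for example, to log progress on a large batch), concurrent.futures.as_completed can replace the ordered collection loop. The sketch below is illustrative and not part of the SDK: batch_invoke_progress is a hypothetical name, it assumes the same client.invoke signature as above, and it reuses the concurrent.futures import already shown.

def batch_invoke_progress(client, mcp, action, token, params_list, max_workers=10):
    """Like batch_invoke, but reports progress as invocations finish."""
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Map each future back to its position so output order still matches input order
        future_to_index = {
            executor.submit(client.invoke, mcp, action, token, params): i
            for i, params in enumerate(params_list)
        }
        results = [None] * len(params_list)
        for done, future in enumerate(concurrent.futures.as_completed(future_to_index), 1):
            i = future_to_index[future]
            try:
                results[i] = future.result()
            except Exception as e:
                results[i] = {"success": False, "error": str(e)}
            print(f"{done}/{len(params_list)} invocations complete")
        return results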
# Usage
captured_plan = client.capture_plan(
    llm="gpt-4",
    prompt="Analyze multiple datasets in parallel"
)
token = client.get_intent_token(captured_plan)["token"]

params_list = [
    {"data": [1, 2, 3], "metrics": ["mean"]},
    {"data": [4, 5, 6], "metrics": ["median"]},
    {"data": [7, 8, 9], "metrics": ["std"]},
    # ... 100 total
]

results = batch_invoke(client, "analytics-mcp", "analyze", token, params_list)
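Because invoke_one converts per-item exceptions into {"success": False, ...} dicts, failed items can be picked out of the ordered results and retried. A minimal sketch, assuming a successful invocation never returns a dict with "success" set to False:

# Pair each input with its result (order is preserved) and collect the failures
failed = [
    (params, result)
    for params, result in zip(params_list, results)
    if isinstance(result, dict) and result.get("success") is False
]

if failed:
    retry_params = [params for params, _ in failed]
    retry_results = batch_invoke(client, "analytics-mcp", "analyze", token, retry_params)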